How to return aiohttp responses like python requests? - python

I want to return a list of responses, the way requests.get or requests.post do, but I can't.
At first I tried returning the response directly, but it came back as a coroutine object.
Here is my code:
import asyncio

import aiohttp
from aiohttp import ClientSession as AioClientSession


class MultiSession(object):
    def __init__(self, method, urls, **kwargs):
        self.method = method
        self.urls = urls
        self.kwargs = kwargs
        self.loop = asyncio.get_event_loop()

    async def get(self, session, url, **kwargs):
        async with session.get(url, **kwargs) as response:
            return await response.json()

    async def post(self, session, url, **kwargs):
        async with session.post(url, **kwargs) as response:
            return await response.text()

    async def fetch_all(self):
        async with AioClientSession(loop=self.loop) as session:
            if self.method == "GET":
                results = await asyncio.gather(*[self.get(session, url, **self.kwargs) for url in self.urls])
            elif self.method == "POST":
                results = await asyncio.gather(*[self.post(session, url, **self.kwargs) for url in self.urls])
            else:
                assert False
            return results

    def run_until_complete(self):
        return self.loop.run_until_complete(self.fetch_all())
And this is my test code:
from utils import multi_requests_get
urls = ["https://httpbin.org/get?{}={}".format(x, x) for x in range(10)]
result = multi_requests_get(urls=urls)
assert result
assert result[0]["args"] == {"0": "0"}
But I want it to work like this:
from utils import multi_requests_get
urls = ["https://httpbin.org/get?{}={}".format(x, x) for x in range(10)]
result = multi_requests_get(urls=urls)
assert result
assert result[0].status_code == 200
assert result[0].json()["args"] == {"0": "0"}
This is how the python requests module looks.
How can I make it work like requests?
Is that possible?
Please help me.

I solved this myself. Here is my code:
import asyncio

from requests import Response
from aiohttp import ClientSession as AioClientSession


class MultiSession(object):
    def __init__(self, method, urls, **kwargs):
        self.method = method
        self.urls = urls
        self.kwargs = kwargs
        self.loop = asyncio.get_event_loop()

    def wrapping_response_instance(self, aio_response, content):
        response = Response()
        response._content = content
        response.url = str(aio_response.url)
        response.status_code = aio_response.status
        headers = {row[0]: row[1] for row in aio_response.headers.items()}
        response.headers = headers
        return response

    async def get(self, session, url, **kwargs):
        async with session.get(url, **kwargs) as aio_response:
            content, _ = await aio_response.content.readchunk()
            response = self.wrapping_response_instance(aio_response, content)
            return response

    async def post(self, session, url, **kwargs):
        async with session.post(url, **kwargs) as aio_response:
            content, _ = await aio_response.content.readchunk()
            response = self.wrapping_response_instance(aio_response, content)
            return response

    async def fetch_all(self):
        async with AioClientSession(loop=self.loop) as session:
            # create the tasks
            if self.method == "GET":
                results = await asyncio.gather(*[self.get(session, url, **self.kwargs) for url in self.urls])
            elif self.method == "POST":
                results = await asyncio.gather(*[self.post(session, url, **self.kwargs) for url in self.urls])
            else:
                assert False
            return results

    def run_until_complete(self):
        return self.loop.run_until_complete(self.fetch_all())


def multi_requests_get_json(urls, params=None, **kwargs):
    session = MultiSession(method="GET", urls=urls, params=params, **kwargs)
    return session.run_until_complete()


def multi_requests_post_json(urls, data=None, **kwargs):
    session = MultiSession(method="POST", urls=urls, data=data, **kwargs)
    return session.run_until_complete()
wrapping_response_instance converts the aiohttp response into a requests.Response instance.
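One caveat (my note, not part of the original solution): content.readchunk() returns only the first chunk of the body, so a large response could come back truncated. A minimal sketch of the same get() helper that reads the full payload instead, assuming the body fits in memory:

    # Sketch of get() using ClientResponse.read() to fetch the complete body
    # rather than a single chunk (assumption: the payload fits in memory).
    async def get(self, session, url, **kwargs):
        async with session.get(url, **kwargs) as aio_response:
            content = await aio_response.read()  # full body as bytes
            return self.wrapping_response_instance(aio_response, content)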

Related

How to write offline test cases for FastAPI dependency?

I currently have the following code for my classification model server. The classifier is passed as a dependency to the index (/) function.
# classifier.py
import asyncio
import httpx
class Classifier():
def __init__(
self,
concurrency_limit,
) -> None:
self.client = httpx.AsyncClient()
self.semaphore = asyncio.Semaphore(concurrency_limit)
async def download_async(self, url):
async with self.semaphore:
response = await self.client.get(url)
return await response.aread()
async def run(
self, image_urls
):
image_list = await asyncio.gather(
*[self.download_async(url) for i, url in enumerate(image_urls)]
)
# Infer Images
pass
# api.py
async def classifier_dependency() -> Classifier:
return Classifier(
concurrency_limit=constants.CONCURRENCY_LIMIT,
)
@server.post("/")
async def index(
data, classifier = Depends(classifier_dependency)
) -> Dict:
results = await classifier.run(data.images)
I am trying to write tests for the API that can be run offline. I basically want to mock the response from httpx.get(). Here is what I am currently doing.
# test_api.py
from unittest.mock import MagicMock

class AsyncMock(MagicMock):  # Not needed if using Python 3.8
async def __call__(self, *args, **kwargs):
return super(AsyncMock, self).__call__(*args, **kwargs)
def update_mock_dependency(image_bytes):
response = AsyncMock(name="Response")
response.aread.return_value = image_bytes
async def override_dependency():
classifier = Classifier(
concurrency_limit=constants.CONCURRENCY_LIMIT,
)
async def f(_):
return response
classifier.client.get = f
return classifier
server.dependency_overrides[classifier_dependency] = override_dependency
def test_successful_inference(image_bytes, image_urls):
"""
Test that the model output is similar to the expected output.
"""
update_mock_dependency(image_bytes)
data = {"images": image_urls}
response = client.post("/", json=data)
assert response.status_code == 200
I'm not sure how to go about it right now in a clean way. Is there a better alternative using mock.patch instead of manually overriding the httpx.get function?
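One possible approach (a sketch only, not tested against the question's project; it assumes Python 3.8+ so unittest.mock.AsyncMock is available, and reuses the client, image_bytes and image_urls names from the question) is to patch httpx.AsyncClient.get directly with mock.patch:

    from unittest.mock import AsyncMock, patch

    import httpx

    def test_successful_inference_with_patch(image_bytes, image_urls):
        # Fake response whose aread() coroutine returns the test bytes.
        mock_response = AsyncMock()
        mock_response.aread.return_value = image_bytes
        # Patch AsyncClient.get on the class, so every Classifier instance
        # gets the mock and no real network request is made. No dependency
        # override is needed: the real Classifier runs, its HTTP calls are
        # intercepted.
        with patch.object(httpx.AsyncClient, "get",
                          new_callable=AsyncMock, return_value=mock_response):
            response = client.post("/", json={"images": image_urls})
        assert response.status_code == 200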

How to ensure only one coroutine of a specific kind exists

In my class I have a method that fetches a website (shown below).
I've noticed that other methods that use this method can end up opening multiple requests to the same site (while one request is pending, self._page is still None).
How can I avoid that?
I mean: when _get_page is called again while one request is still pending, just return a future from the first call and don't repeat the page request.
async def _get_page(self) -> HtmlElement:
if self._page is None:
async with self._get_session().get(self._url) as page:
self._page = lxml.html.document_fromstring(await page.text())
return self._page
How can I avoid [multiple requests]?
You could use an asyncio.Lock:
def __init__(self, ...):
...
self._page_lock = asyncio.Lock()
async def _get_page(self) -> HtmlElement:
async with self._page_lock:
if self._page is None:
async with self._get_session().get(self._url) as page:
self._page = lxml.html.document_fromstring(await page.text())
return self._page
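With the lock held across both the check and the request, a second caller that arrives while the first request is still in flight simply waits on the lock and then finds self._page already set, so only one request is ever sent.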
Update for Python 3.8 and jupyter notebook
import asyncio
import aiohttp
from lxml import html
class MyClass:
def __init__(self):
self._url = 'https://www.google.com'
self._page = None
self._futures = []
self._working = False
self._session = aiohttp.ClientSession()
async def _close(self):
if self._session:
session = self._session
self._session = None
await session.close()
def _get_session(self):
return self._session
async def _get_page(self):
if self._page is None:
if self._working:
print('will await current page request')
loop = asyncio.get_event_loop()
future = loop.create_future()
self._futures.append(future)
return await future
else:
self._working = True
session = self._get_session()
print('making url request')
async with session.get(self._url) as page:
print('status =', page.status)
print('making page request')
self._page = html.document_fromstring(await page.text())
print('Got page text')
for future in self._futures:
print('setting result to awaiting request')
future.set_result(self._page)
self._futures = []
self._working = False
return self._page
async def main():
futures = []
m = MyClass()
futures.append(asyncio.ensure_future(m._get_page()))
futures.append(asyncio.ensure_future(m._get_page()))
futures.append(asyncio.ensure_future(m._get_page()))
results = await asyncio.gather(*futures)
for result in results:
print(result[0:80])
await m._close()
if __name__ == '__main__':
asyncio.run(main())
#await main() # In jupyter notebook and iPython
Note that on Windows 10 I have seen the following at termination:
RuntimeError: Event loop is closed
See https://github.com/aio-libs/aiohttp/issues/4324
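One workaround often suggested for that issue (an assumption on my part, not something verified against this exact example) is to switch Windows back to the selector event loop before starting the program:

    import sys
    import asyncio

    # On Windows, Python 3.8 defaults to the proactor event loop, which is the
    # loop implicated in the "Event loop is closed" error at shutdown.
    if sys.platform.startswith("win"):
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    asyncio.run(main())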

Receiving streaming data after implementing asyncio websockets as a class?

My question is closely related to the following question on Stack Overflow and to the documentation here.
I am defining a websocket connection as a class. Next, I create a new class that stores the earlier defined websocket class as self.ws and specifies which data to send to the websocket in self.request.
My problem is that the current script only runs once, whereas my desired output is continuous data.
The second link shows that I can retrieve continuous / streaming data using
asyncio.get_event_loop().run_until_complete(call_api(json.dumps(msg)))
I have included all of the above in my code (call_api is defined differently because I want to write it as a class). Below is my code:
import sys, json
import asyncio
from websockets import connect
class EchoWebsocket:
def __init__(self, URL, CLIENT_ID=None, CLIENT_SECRET=None):
self.url = URL
self.client_id = CLIENT_ID
self.client_secret = CLIENT_SECRET
async def __aenter__(self):
self._conn = connect(self.url)
self.websocket = await self._conn.__aenter__()
return self
async def __aexit__(self, *args, **kwargs):
await self._conn.__aexit__(*args, **kwargs)
async def send(self, message):
await self.websocket.send(message)
async def receive(self):
return await self.websocket.recv()
class DERIBIT:
def __init__(self):
self.ws = EchoWebsocket(URL='wss://test.deribit.com/ws/api/v2')
self.loop = asyncio.get_event_loop()
self.request = \
{"jsonrpc": "2.0",
"method": "public/subscribe",
"id": 42,
"params": {
"channels": ["deribit_price_index.btc_usd"]}
}
def get_ticks(self):
return self.loop.run_until_complete(self.__async__get_ticks())
async def __async__get_ticks(self):
async with self.ws as echo:
await echo.send(json.dumps(self.request))
response = await echo.receive()
print(response)
if __name__ == "__main__":
deribit = DERIBIT()
deribit.get_ticks()
This script gives the following output:
{"jsonrpc": "2.0", "method": "public/subscribe", "id": 42, "params": {"channels": ["deribit_price_index.btc_usd"]}}
whereas I would like to see a continuous stream of price updates.
Please advise.
I have only worked with Tornado's websockets, but they work pretty well and Tornado has many helpers for dealing with async code:
import json
import tornado
from tornado.ioloop import PeriodicCallback
from tornado.websocket import websocket_connect
class EchoWebsocket:
def __init__(self, url, client_id=None, client_secret=None):
self.url = url
self.client_id = client_id
self.client_secret = client_secret
self.websocket = None
async def connect(self):
if not self.websocket:
self.websocket = await websocket_connect(self.url)
async def close(self):
await self.websocket.close()
self.websocket = None
async def read(self):
return await self.websocket.read_message()
async def write(self, message):
await self.websocket.write_message(message)
class DERIBIT:
def __init__(self):
self.ws = EchoWebsocket(url='wss://test.deribit.com/ws/api/v2')
self.request = {
"jsonrpc": "2.0",
"method": "public/subscribe",
"id": 42,
"params": {
"channels": ["deribit_price_index.btc_usd"]}
}
self.callback = PeriodicCallback(self.get_ticks, 1000)
self.callback.start()
async def get_ticks(self):
if not self.ws.websocket:
await self.ws.connect()
await self.ws.write(json.dumps(self.request))
response = await self.ws.read()
print(response)
if __name__ == "__main__":
deribit = DERIBIT()
tornado.ioloop.IOLoop.current().start()
Output:
{"jsonrpc":"2.0","id":42,"result":["deribit_price_index.btc_usd"],"usIn":1587298852138977,"usOut":1587298852139023,"usDiff":46,"testnet":true}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587298851526,"price":7173.46,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587298852533,"price":7173.53,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","id":42,"result":["deribit_price_index.btc_usd"],"usIn":1587298852932540,"usOut":1587298852932580,"usDiff":40,"testnet":true}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587298852533,"price":7173.53,"index_name":"btc_usd"}}}
The example above could be simplified a lot if you integrate the websocket into the DERIBIT class rather than create a separate class for it.
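For illustration, a rough sketch of that simplification (same Tornado imports and test URL as above; the structure and error handling are my own simplifications, not tested code):

    import json
    import tornado
    from tornado.ioloop import PeriodicCallback
    from tornado.websocket import websocket_connect

    class DERIBIT:
        def __init__(self):
            self.url = 'wss://test.deribit.com/ws/api/v2'
            self.websocket = None
            self.request = {
                "jsonrpc": "2.0",
                "method": "public/subscribe",
                "id": 42,
                "params": {"channels": ["deribit_price_index.btc_usd"]}
            }
            # Poll once a second, reusing the same connection.
            PeriodicCallback(self.get_ticks, 1000).start()

        async def get_ticks(self):
            if self.websocket is None:
                # Connect lazily on the first tick.
                self.websocket = await websocket_connect(self.url)
            await self.websocket.write_message(json.dumps(self.request))
            print(await self.websocket.read_message())

    if __name__ == "__main__":
        DERIBIT()
        tornado.ioloop.IOLoop.current().start()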
The problem is in the function.
First, loop.run_until_complete runs only until the future is complete (see the run_until_complete docs). That means your receive call will handle only one response; run_until_complete is not a callback mechanism.
So, in your case, in main:
deribit.get_ticks() -> runs the coroutine __async__get_ticks
So __async__get_ticks is the task; let's see what the task does:
1. open the ws connection
2. send the request
3. wait for the response from the ws
4. print(response)
At that point the task is done, which is why you only see one line:
async def __async__get_ticks(self):
async with self.ws as echo:
await echo.send(json.dumps(self.request))
response = await echo.receive()
print(response)
After that explanation, the solution is simple: wrap the response line in a while loop:
async def __async__get_ticks(self):
async with self.ws as echo:
await echo.send(json.dumps(self.request))
while True:
response = await echo.receive()
print(response)
Output:
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654476817,"price":7540.54,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654477824,"price":7540.52,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654478831,"price":7540.15,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654479838,"price":7539.83,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654480845,"price":7539.2,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654481852,"price":7538.96,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654482859,"price":7538.9,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654483866,"price":7538.89,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654484873,"price":7538.47,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654485880,"price":7537.15,"index_name":"btc_usd"}}}

How do I Mock the coroutine json() when using aiohttp.ClientSession.get

I want to mock the json() coroutine from the aiohttp.ClientSession.get method. It looks like it returns an async generator object, which is where I'm confused about how to mock it in my example. Here is my code:
async def get_access_token():
async with aiohttp.ClientSession(auth=auth_credentials) as client:
async with client.get(auth_path, params={'grant_type': 'client_credentials'}) as auth_response:
assert auth_response.status == 200
auth_json = await auth_response.json()
return auth_json['access_token']
This is my test case to mock the get method:
json_data = [{
'access_token': 'HSG9hsf328bJSWO82sl',
'expires_in': 86399,
'token_type': 'bearer'
}]
class AsyncMock:
async def __aenter__(self):
return self
async def __aexit__(self, *error_info):
return self
@pytest.mark.asyncio
async def test_wow_api_invalid_credentials(monkeypatch, mocker):
def mock_client_get(self, auth_path, params):
mock_response = AsyncMock()
mock_response.status = 200
mock_response.json = mocker.MagicMock(return_value=json_data)
return mock_response
monkeypatch.setattr('wow.aiohttp.ClientSession.get', mock_client_get)
result = await wow.get_access_token()
assert result == 'HSG9hsf328bJSWO82sl'
I think the problem might be that mock_response.json() is not awaitable. In my example I can't call await from a non-async function, so I'm confused about how I would do that. I would like to keep the test libraries to a minimum (pytest and pytest-asyncio) for the learning experience and to rely less on third-party libraries.
I was making it more complicated than it needed to be. I simply defined json as an async method on AsyncMock that returns the json_data. The complete code looks like this:
json_data = {
'access_token': 'HSG9hsf328bJSWO82sl',
'expires_in': 86399,
'token_type': 'bearer'
}
class AsyncMock:
async def __aenter__(self):
return self
async def __aexit__(self, *error_info):
return self
async def json(self):
return json_data
@pytest.mark.asyncio
async def test_wow_api_invalid_credentials(monkeypatch):
def mock_client_get(self, auth_path, params):
mock_response = AsyncMock()
mock_response.status = 200
return mock_response
monkeypatch.setattr('wow.aiohttp.ClientSession.get', mock_client_get)
result = await wow.get_access_token()
assert result == 'HSG9hsf328bJSWO82sl'
This is part 1, but I suggest you also read part 2.
I'm not sure I fully understand your question, because using async def or the @asyncio.coroutine decorator can accomplish this. I actually wanted to write this as a comment, but there are too many differences to fit into one.
import asyncio
json_ = [{
'access_token': 'HSG9hsf328bJSWO82sl',
'expires_in': 86399,
'token_type': 'bearer'
}]
async def response_from_sun():
return json_
class AsyncMock:
async def specify(self):
return self.json[0].get("access_token")
async def __aenter__(self):
return self
async def __aexit__(self, *error_info):
return self
async def mock_client_get():
mock_response = AsyncMock()
mock_response.status = 200
mock_response.json = await response_from_sun()
return mock_response
async def go():
resp = await mock_client_get()
result = await resp.specify()
assert result == 'HSG9hsf328bJSWO82sl'
asyncio.get_event_loop().run_until_complete(go())
PART 2
After adding my answer, I found there is a problem with your mock_response content, because mock_response does not contain the attributes and methods that ClientResponse has.
Edit: After several attempts and reading ClientSession's code, I found that you can specify a new response class through its response_class parameter. Note: connector=aiohttp.TCPConnector(verify_ssl=False) is unnecessary.
import asyncio
import aiohttp
class Mock(aiohttp.ClientResponse):
print("Mock")
async def specify(self):
json_ = (await self.json()).get("hello")
return json_
async def go():
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False),response_class=Mock) as session:
resp = await session.get("https://www.mocky.io/v2/5185415ba171ea3a00704eed")
result = await resp.specify()
print(result)
assert result == 'world'
asyncio.get_event_loop().run_until_complete(go())

Python Mock with Aiohttp isn't mocking

I'm using aiohttp to implement a service at work, and during my tests I'm mocking a method. The issue is that the call site invokes the real method instead of the mocked one.
@unittest_run_loop
@patch('export_api.main.add_job_to_db')
async def test_view_job(self, mocked_method):
    json = {
        "edl": "somedata"
    }
    response = await self.client.request("PUT", "/v1/job", json=json)
    mocked_method.assert_called_once_with()
    assert response.status == 200
So I get this error on the assertion of the mock:
msg = "Expected 'add_job_to_db' to be called once. Called 0 times."
My method in main.py:
async def __call__(self, request):
    """Posts the Job to the Render queue."""
    data = await request.json()
    job_id = uuid.uuid4()
    job = Jobs(
        job_id=str(job_id),
        body=data
    )
    try:
        add_job_to_db(self.app['db'], job)
        return web.Response(status=200)
    except DatabaseError as e:
        print(e)
        return web.Response(status=500)
Yes, is a callable method inside a class. The test work fine without the mocking. But I need to mock the call for the db, and I'm not having luck so far.
Any ideas?
Checklist:
Use the asynctest package
Don't forget to use the @asyncio.coroutine decorator (or async def) on the unit test
Patch the object where it is used, in this case export_api.main.add_job_to_db
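For illustration, a sketch of the checklist applied to the test from the question (assumptions on my part: the test class extends aiohttp's AioHTTPTestCase as in the original setup, and export_api.main is really where add_job_to_db is looked up):

    import asynctest
    from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop

    class TestJobView(AioHTTPTestCase):
        # get_application() from the existing test class is assumed here.

        @unittest_run_loop
        @asynctest.patch('export_api.main.add_job_to_db')
        async def test_view_job(self, mocked_add_job):
            response = await self.client.request("PUT", "/v1/job",
                                                 json={"edl": "somedata"})
            assert response.status == 200
            # The view should have handed the job to the (patched) db helper.
            mocked_add_job.assert_called_once()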
I'm the author of mocket, and a few days ago I released version 2.0.0, which fully supports asyncio/aiohttp.
Here is the same example of code mocking a URL over HTTP and over HTTPS:
import aiohttp
import asyncio
import async_timeout
from unittest import TestCase
from mocket.mocket import mocketize
from mocket.mockhttp import Entry
class AioHttpEntryTestCase(TestCase):
    @mocketize
def test_http_session(self):
url = 'http://httpbin.org/ip'
body = "asd" * 100
Entry.single_register(Entry.GET, url, body=body, status=404)
Entry.single_register(Entry.POST, url, body=body*2, status=201)
async def main(l):
async with aiohttp.ClientSession(loop=l) as session:
with async_timeout.timeout(3):
async with session.get(url) as get_response:
assert get_response.status == 404
assert await get_response.text() == body
with async_timeout.timeout(3):
async with session.post(url, data=body * 6) as post_response:
assert post_response.status == 201
assert await post_response.text() == body * 2
loop = asyncio.get_event_loop()
loop.set_debug(True)
loop.run_until_complete(main(loop))
    @mocketize
def test_https_session(self):
url = 'https://httpbin.org/ip'
body = "asd" * 100
Entry.single_register(Entry.GET, url, body=body, status=404)
Entry.single_register(Entry.POST, url, body=body*2, status=201)
async def main(l):
async with aiohttp.ClientSession(loop=l) as session:
with async_timeout.timeout(3):
async with session.get(url) as get_response:
assert get_response.status == 404
assert await get_response.text() == body
with async_timeout.timeout(3):
async with session.post(url, data=body * 6) as post_response:
assert post_response.status == 201
assert await post_response.text() == body * 2
loop = asyncio.get_event_loop()
loop.set_debug(True)
loop.run_until_complete(main(loop))
Source: https://github.com/mindflayer/python-mocket/blob/master/tests/tests35/test_http_aiohttp.py
