Hopefully this is not a too stupid question, but I am having trouble with aiohttp cookie processing.
Aiohttp's CookieJar class mentions it implements cookie storage adhering to RFC 6265, which states that:
cookies for a given host are shared across all the ports on that host
Cookies do not provide isolation by port. If a cookie is readable by a service running on one port, the cookie is also readable by a service running on another port of the same server.
But if I create two aiohttp servers, one that makes you "login" and gives you a cookie back, and another one with an endpoint that expects you to have a cookie, both hosted on localhost (two different ports I guess), the cookie will not be processed.
Here's a set of 4 tests using aiohttp, pytest, and pytest-aiohttp to explain:
import functools
import pytest
from aiohttp import web
pytestmark = pytest.mark.asyncio
def attach_session(f):
    """Decorator: read the "testcookie" cookie into request["mysession"] and
    echo it back on the handler's response."""
    @functools.wraps(f)  # was garbled to '#functools.wraps(f)' in the paste
    async def wrapper(request: web.Request):
        session_id = request.cookies.get("testcookie")
        request["mysession"] = session_id
        response = await f(request)
        response.set_cookie("testcookie", session_id)
        return response
    return wrapper
def is_logged_in(f):
    """Decorator: respond 401 unless attach_session placed a session on the request."""
    @functools.wraps(f)   # both decorators were garbled to '#' comments in the paste
    @attach_session
    async def wrapper(request: web.Request):
        session = request["mysession"]
        if not session:
            raise web.HTTPUnauthorized
        return await f(request)
    return wrapper
async def login(_: web.Request):
    """Login endpoint: issue the fixed session cookie."""
    response = web.Response()
    response.set_cookie("testcookie", "somerandomstring")
    return response
@is_logged_in  # was garbled to '#is_logged_in' in the paste
async def some_endpoint(request: web.Request):
    """Protected endpoint: only reachable with a valid session cookie."""
    return web.Response(text="sweet")
@pytest.fixture  # was garbled to '#pytest.fixture' in the paste
def auth_client(event_loop, aiohttp_client):
    """Test client for the auth app exposing POST /login."""
    app = web.Application()
    app.router.add_post("/login", login)
    return event_loop.run_until_complete(aiohttp_client(app))
@pytest.fixture  # was garbled to '#pytest.fixture' in the paste
def core_client(event_loop, aiohttp_client):
    """Test client for the core app exposing GET /some_endpoint."""
    app = web.Application()
    app.router.add_get("/some_endpoint", some_endpoint)
    return event_loop.run_until_complete(aiohttp_client(app))
async def test_login(auth_client):
    """Logging in returns 200 and sets the session cookie."""
    resp = await auth_client.post("/login")
    assert resp.status == 200
    assert resp.cookies.get("testcookie").value == "somerandomstring"
async def test_some_endpoint_anonymous(core_client):
    """Without a cookie the protected endpoint rejects the request."""
    resp = await core_client.get("/some_endpoint")
    assert resp.status == 401
async def test_some_endpoint_as_logged_in(auth_client, core_client):
    """Passing resp1.cookies (a SimpleCookie) to a different client does not
    authenticate — this is the behaviour the question is asking about."""
    resp1 = await auth_client.post("/login")
    resp2 = await core_client.get("/some_endpoint", cookies=resp1.cookies)
    assert resp2.status == 401
async def test_some_endpoint_as_logged_in_again(auth_client, core_client):
    """Sending the same cookie as a plain dict does authenticate (200)."""
    resp1 = await auth_client.post("/login")
    _cookie = list(resp1.cookies.values())[0]
    resp2 = await core_client.get(
        "/some_endpoint", cookies={_cookie.key: _cookie.value}
    )
    assert resp2.status == 200
But from my understanding, the "test_some_endpoint_as_logged_in" test should work. Why is it returning 401, while the same thing but with sending the cookie as a dict returns 200?
I think the correct way of sharing the cookies between clients would be loading the SimpleCookie object of the resp1 to the core_client.session.cookie_jar.
Changing the code of test_some_endpoint_as_logged_in to the following should fix it:
async def test_some_endpoint_as_logged_in(auth_client, core_client):
    """Share the login cookie by loading it into the other client's cookie jar."""
    resp1 = await auth_client.post("/login")
    core_client.session.cookie_jar.update_cookies(resp1.cookies)
    resp2 = await core_client.get("/some_endpoint")
    # With the cookie jar updated the request is authenticated; the original
    # paste asserted 401, which contradicts "should fix it".
    assert resp2.status == 200
Cookie data is kept in the session object; as auth_client and core_client are different sessions, each with their own cookie data, cookies are not shared between them. It is comparable to using a different browser for each, where each browser has its own cookie_jar.
Related
I have an issue in a personal project. I have developed a class to encapsulate an Http client which use aiohttp so it uses async code.
I have several classes (related to different REST services) that use this client by composition.
In every method of the service classes, I call the http client.
The code of the Http client looks like (I have simplified for the example):
class HttpClientSession:
    """
    This class is used for interfacing with an Http server.

    Intended to be used as an async context manager so that one
    aiohttp ClientSession is reused across several requests.
    """

    def __init__(
        self,
        http_url: str,
        auth: Optional[BasicAuth] = None,
    ):
        """
        Constructs the Http client.

        :param http_url: URL to connect to Http server.
        :param auth: Authentication to connect to server.
        :raises Exception: If URL is empty.
        """
        logger.debug("Create Http client")
        if not http_url:
            raise Exception("Http URL is invalid")
        self._url: str = http_url
        #: Server authentication
        self._auth: Optional[BasicAuth] = auth
        #: HTTP session (created in __aenter__, closed in __aexit__)
        self._session: Optional[ClientSession] = None
        logger.debug("Http client created")

    async def __aenter__(self):
        """
        Create Http session to send requests.
        """
        self._session = ClientSession(
            auth=self._auth,
            raise_for_status=True,
            connector=TCPConnector(limit=50),
        )
        return self

    async def __aexit__(self, *err):
        """
        Close Http session.
        """
        await self._session.close()
        self._session = None

    async def get(self, suffix_url: str, headers: dict, query: Any = None):
        """
        Send a GET request.

        :param suffix_url: Last part of the URL contains the request.
        :param headers: Header for the request.
        :param query: Query of the request.
        :return: Response body decoded as JSON.
        """
        async with self._session.get(
            url=self._url + suffix_url,
            headers=headers,
            params=query,
        ) as response:
            return await response.json()
class HttpClient:
    """
    This class is used for interfacing with an Http server.

    Wraps HttpClientSession so each call opens (and closes) its own session.
    """

    def __init__(
        self,
        http_url: str,
        auth: Optional[BasicAuth] = None,
    ):
        """
        Constructs the Http client.

        :param http_url: URL to connect to Http server.
        :param auth: Authentication to connect to server.
        :raises Exception: If URL is empty.
        """
        logger.debug("Create Http client")
        #: HTTP session factory used per-request
        self._session: HttpClientSession = HttpClientSession(http_url, auth)
        logger.debug("Http client created")

    async def get(self, suffix_url: str, headers: dict, query: Any = None):
        """
        Send a GET request using a dedicated, short-lived session.

        :param suffix_url: Last part of the URL contains the request.
        :param headers: Header for the request.
        :param query: Query of the request.
        :return: Response body decoded as JSON.
        """
        async with self._session as session:
            return await session.get(suffix_url, headers, query)
To be more efficient, I would like to be able to reuse an Http session; that's why I have created a session class which permits the caller to use the async with syntax. If reuse is not needed, the caller can call the HttpClient method directly, which creates a dedicated session.
So it is convenient because I can write:
http_client = HttpClient(...)
http_client.get(...)
or
http_client = HttpClient(...)
async with http_client as http_session:
http_session.get(...)
http_session.get(...)
So great it works but now my issue is I would like to do the same for user of services classes to be also able to reuse session or not. I'm a bit stuck but my intention is to have this syntax also :
client = ServiceClient(...)
client.do_something(...) # do_something will call http_client.get(...)
or
client = ServiceClient(...)
async with client as session:
session.do_something(...) # do_something will call http_session.get(...)
session.do_something_else(...) # do_something_else will reuse http_session
but I don't want to do every time:
client = ServiceClient(...)
async with client as session:
session.do_something(...) # session is not reuse so I don't want boilerplate of async with
I have tried to define __aenter__ method but I haven't found an elegant way to avoid duplication of code.
Have you some ideas ?
I have tried to apply the same pattern used in the HttpClient and HttpClientSession classes to the service classes, but I haven't succeeded in having ServiceClient call HttpClient and ServiceClientSession call HttpClientSession.
And in fact I'm even not sure it is the better pattern to do that.
I have found one solution.
For the HttpClient, I have refactored it to remove the HttpClientSession class and manage an optional session:
class HttpClient:
    """
    This class is used for interfacing with an Http server.

    Can be used either directly (a one-shot session per request) or as an
    async context manager (one long-lived session reused across requests).
    """

    def __init__(
        self,
        http_url: str,
        auth: Optional[BasicAuth] = None,
    ):
        """
        Constructs the Http client.

        :param http_url: URL to connect to Http server.
        :param auth: Authentication to connect to server.
        :raises Exception: If URL is empty.
        """
        logger.debug("Create Http client")
        if not http_url:
            raise Exception("Http URL is invalid")
        self._url: str = http_url
        #: Server authentication
        self._auth: Optional[BasicAuth] = auth
        #: HTTP session; None unless entered as an async context manager
        self._session: Optional[ClientSession] = None
        logger.debug("Http client created")

    async def __aenter__(self):
        """
        Create Http session to send requests.
        """
        self._session = ClientSession(
            auth=self._auth,
            raise_for_status=True,
            connector=TCPConnector(limit=50),
        )
        return self

    async def __aexit__(self, *err):
        """
        Close Http session.
        """
        await self._session.close()
        self._session = None

    async def get(self, suffix_url: str, headers: dict, query: Any = None):
        """
        Send a GET request.

        Reuses the long-lived session when one exists; otherwise creates a
        dedicated session for this single call.

        :param suffix_url: Last part of the URL contains the request.
        :param headers: Header for the request.
        :param query: Query of the request.
        :return: Response body decoded as JSON.
        """
        if self._session:
            async with self._session.get(
                url=self._url + suffix_url,
                headers=headers,
                params=query,
            ) as response:
                return await response.json()
        else:
            async with ClientSession(
                auth=self._auth,
                raise_for_status=True,
                connector=TCPConnector(limit=50),
            ) as session:
                async with session.get(
                    url=self._url + suffix_url,
                    headers=headers,
                    params=query,
                ) as response:
                    return await response.json()
And in service class, I have defined also the methods to support async with syntax:
async def __aenter__(self):
    """
    Create session to send requests by delegating to the wrapped http client.
    """
    await self._http_client.__aenter__()
    return self
async def __aexit__(self, *err):
    """
    Close session by delegating to the wrapped http client.
    """
    await self._http_client.__aexit__(*err)
So it works but I don't know if there is a better way to do it because I'm not fan of the if-else statement.
I'm trying to communicate with a fingerprint device. Actually it sends data through a websocket connection. So, I think I can communicate with the device using websockets. Here I'm using FastAPI, but it only accepts JSON data. The problem is that I need to handle XML data, however, I do not know how to send and accept data in XML format.
FastAPI can accept and validate other types of data as well, not only JSON as you stated. Have a look at the documentation. Regarding XML, as FastAPI is actually Starlette underneath, you can use Starlette's Request object directly to read the request body as bytes, and return a custom Response with the XML data (if required). You can check if the incoming request is of the required Content-Type, and if so, let it through; otherwise, you could raise an HTTPException. Below is a working example using Python requests on client side and a normal HTTP endpoint on server side.
Using HTTP Protocol
app.py
from fastapi import FastAPI, Response, Request, HTTPException

app = FastAPI()


@app.post("/submit")  # was garbled to '#app.post(...)' in the paste
async def submit(request: Request):
    """Echo an XML request body back; reject any other Content-Type with 400."""
    content_type = request.headers['Content-Type']
    if content_type == 'application/xml':
        body = await request.body()
        return Response(content=body, media_type="application/xml")
    else:
        raise HTTPException(status_code=400, detail=f'Content type {content_type} not supported')
test.py
import requests

# XML payload containing a non-ASCII character to exercise utf-8 encoding.
body = """<?xml version='1.0' encoding='utf-8'?><a>б</a>"""
headers = {'Content-Type': 'application/xml'}
url = 'http://127.0.0.1:8000/submit'
# Send raw bytes; the server routes on the Content-Type header above.
r = requests.post(url, data=body.encode('utf-8'), headers=headers)
print(r.content)
In websockets, you can use send_bytes() and receive_bytes() for the communication, as described in Starlette's documentation, allowing you to send and receive (byte encoded) XML data as well. If you would like to perform validation on the received XML data, have a look at this answer.
Using WebSocket Protocol
app.py
from fastapi import FastAPI, Request, WebSocket, WebSocketDisconnect
import uvicorn

app = FastAPI()


@app.websocket("/ws")  # was garbled to '#app.websocket(...)' in the paste
async def get_stream(websocket: WebSocket):
    """Accept a websocket connection and print every bytes message received."""
    await websocket.accept()
    try:
        while True:
            contents = await websocket.receive_bytes()
            print(str(contents, 'utf-8'))
    except WebSocketDisconnect:
        print("Client disconnected")


if __name__ == '__main__':
    uvicorn.run(app, host='127.0.0.1', port=8000)
test.py
import websockets
import asyncio


async def main():
    """Send the sample XML payload as bytes once per second, forever."""
    url = 'ws://127.0.0.1:8000/ws'
    async with websockets.connect(url) as ws:
        while True:
            b = bytes("<?xml version='1.0' encoding='utf-8'?><a>б</a>", 'utf-8')
            await ws.send(b)
            await asyncio.sleep(1)


asyncio.run(main())
I'm trying to test Flask REST-API end points with pytest using test_client(). But I'm getting an Error saying
> RuntimeError: You cannot use AsyncToSync in the same thread as an async event loop - just await the async function directly.
Can Anyone explain me why this happen and what is the solution to avoid this Error.
Test Function:
import pytest
from unittest import mock
from flask import request
from app import create_app
from app.base.views import views

app = create_app()


async def save_token_(sugar, payload, instance, bool, domain_id):
    # Stub stand-in for the real save_token coroutine.
    # NOTE(review): parameter named `bool` shadows the builtin.
    return {'valid':True}


payload = {
    "password": 'password',
    "username": 'crm_admin',
    "grant_type": "password"
}


@pytest.mark.asyncio  # was garbled to '#pytest.mark.asyncio' in the paste
async def test_post_sugar_token(monkeypatch, aiohttp_client, loop):
    mock_save_token = mock.AsyncMock(name = "mock_save_token")
    mock_save_token.return_value = await save_token_(None, payload, 'domain.org', True, 89)
    monkeypatch.setattr(views, 'save_token', mock_save_token)
    await views.save_token(None, payload, 'domain.org', True, 9)
    assert mock_save_token.call_args_list == [mock.call(None, payload, 'domain.org', True, 89)]
    headers = {'Autherization': 'ehrdmek2492.fkeompvmw.04294002'}
    data = {
        'password': '12345',
        'key':'Hi',
        'instance': 'my.domain',
        'domain_id': 1
    }
    ##-# using test_client()
    # Calling Flask's sync test client from inside an async test is what
    # raises the AsyncToSync RuntimeError described below.
    client = app.test_client()
    res = client.post('/token/sugar/', data = data, headers = headers)
    assert res.status_code == 200
    assert res.content_type == 'application/json'
    assert res.json == {'valid':True}
    # # ----------------------------------------------------------------------
Error Message
I ran into the same issue and opened a ticket on flask's github repo:
https://github.com/pallets/flask/issues/4375.
They kindly explained the issue and provided a workaround.
In short, flask can handle async views, and flask's test_client can be used in an async context, but you cannot use both at the same time.
Quoting from the github issue:
The problem in this case is that the Flask codebase is not compatible with asyncio, so you cannot run the test client inside an asyncio loop. There is really nothing to gain from writing your unit test as an async test, since Flask itself isn't async.
Here's the workaround suggested, slightly re-adjusted for your example:
@pytest.mark.asyncio  # was garbled to '#pytest.mark.asyncio' in the paste
async def test_post_sugar_token():
    # ... same code as before

    ##-# using test_client()
    def sync_test():
        # Run the synchronous Flask test client outside the event loop.
        with app.test_client() as client:
            res = client.post('/token/sugar/', data = data, headers = headers)
            assert res.status_code == 200
            assert res.content_type == 'application/json'
            assert res.json == {'valid':True}

    # Off-load the sync work to a thread executor so it does not run
    # inside the asyncio loop (requires `import asyncio`).
    loop = asyncio.get_running_loop()
    await loop.run_in_executor(None, sync_test)
I am trying to make a request to server A, where the response will be a list of requests, which I will make to server B.
Currently request to server A is just a simple sync request like this:
import requests

# Synchronous request to server A; its JSON response lists the follow-up
# requests that will be issued (asynchronously) against server B.
req = requests.get('https://server-a.com')
data = req.json()
list_of_requests = data['requests'] # requests for server B
Since list_of_requests can be a few thousand items long, I would like to use async to speed up the requests to B.
I've looked at several examples of async HTTP requests using aiohttp, such as from
https://towardsdatascience.com/fast-and-async-in-python-accelerate-your-requests-using-asyncio-62dafca83c33
import aiohttp
import asyncio
import os
from aiohttp import ClientSession
# Base URL for ISBN lookups against the Google Books API.
GOOGLE_BOOKS_URL = "https://www.googleapis.com/books/v1/volumes?q=isbn:"
# Sample ISBNs: one async lookup is spawned per entry.
LIST_ISBN = [
'9780002005883',
'9780002238304',
'9780002261982',
'9780006163831',
'9780006178736',
'9780006280897',
'9780006280934',
'9780006353287',
'9780006380832',
'9780006470229',
]
def extract_fields_from_response(response):
    """Extract (title, subtitle, description, published_date) from a Google
    Books API response dict; each element is None when absent."""
    # `or [{}]` also guards against an explicit empty "items" list, which
    # would otherwise raise IndexError on [0].
    item = (response.get("items") or [{}])[0]
    volume_info = item.get("volumeInfo", {})
    title = volume_info.get("title", None)
    subtitle = volume_info.get("subtitle", None)
    description = volume_info.get("description", None)
    published_date = volume_info.get("publishedDate", None)
    return (
        title,
        subtitle,
        description,
        published_date,
    )
async def get_book_details_async(isbn, session):
    """Get book details using Google Books API (asynchronously)."""
    url = GOOGLE_BOOKS_URL + isbn
    try:
        response = await session.request(method='GET', url=url)
        response.raise_for_status()
        print(f"Response status ({url}): {response.status}")
    # NOTE(review): HTTPError is not imported in this snippet (presumably
    # requests.HTTPError in the source article) — confirm the import.
    except HTTPError as http_err:
        print(f"HTTP error occurred: {http_err}")
    except Exception as err:
        print(f"An error ocurred: {err}")
    # NOTE(review): if session.request() itself raised, `response` is unbound
    # here and this line raises NameError/UnboundLocalError.
    response_json = await response.json()
    return response_json
async def run_program(isbn, session):
    """Wrapper for running program in an asynchronous manner."""
    try:
        response = await get_book_details_async(isbn, session)
        parsed_response = extract_fields_from_response(response)
        print(f"Response: {json.dumps(parsed_response, indent=2)}")
    except Exception as err:
        print(f"Exception occured: {err}")
        pass


async def main():
    # NOTE(review): in the paste the two lines below appeared at module level,
    # where `async with`/`await` are a SyntaxError; they must run inside a
    # coroutine (as in the referenced article).
    async with ClientSession() as session:
        await asyncio.gather(*[run_program(isbn, session) for isbn in LIST_ISBN])
However, all of the examples I have looked at start with the list of requests already defined. My question is, what is the proper pythonic way/pattern of combining a single sync request and then using that request to 'spawn' async tasks?
Thanks a bunch!
Using the below route definition, I am trying to extract the book_id out of the URL in aiohttp.
from aiohttp import web
routes = web.RouteTableDef()


@routes.get('/books/{book_id}')  # was garbled to '#routes.get(...)' in the paste
async def get_book_pages(request: web.Request) -> web.Response:
    """Return the book_id path segment as JSON."""
    book_id = request.match_info.get('book_id', None)
    return web.json_response({'book_id': book_id})
Below is the test (using pytest) I have written
import asynctest
import pytest
import json


async def test_get_book() -> None:
    # NOTE(review): make_mocked_request and get_book are not imported in this
    # snippet — presumably aiohttp.test_utils.make_mocked_request and the
    # application handler; confirm against the full test module.
    request = make_mocked_request('GET', '/books/1')
    response = await get_book(request)
    assert 200 == response.status
    body = json.loads(response.body)
    assert 1 == body['book_id']
Test Result:
None != 1
Expected :1
Actual :None
Outside of the tests, when I run a request to /books/1 the response is {'book_id': 1}
What is the correct way to retrieve dynamic values from the path in aiohttp when mocking the request?
make_mocked_request() knows nothing about an application and its routes.
To pass dynamic info you need to provide a custom match_info object:
async def test_get_book() -> None:
    """Mock the request with an explicit match_info so the handler can read
    the dynamic path value."""
    request = make_mocked_request('GET', '/books/1',
                                  match_info={'book_id': '1'})
    response = await get_book(request)
    assert 200 == response.status
    body = json.loads(response.body)
    assert 1 == body['book_id']
P.S.
In general, I want to warn about mocks over-usage. Usually, functional testing with aiohttp_client is easier to read and maintain.
I prefer mocking for really hard-to-test things like network error emulation.
Otherwise your tests do test your own mocks, not a real code.