Design pattern with async with and nested classes in Python

I have an issue in a personal project. I have developed a class to encapsulate an HTTP client which uses aiohttp, so it is async code.
I have several classes (related to different REST services) that use this client by composition.
In every method of the service classes, I call the HTTP client.
The code of the HTTP client looks like this (I have simplified it for the example):
import logging
from typing import Any, Optional

from aiohttp import BasicAuth, ClientSession, TCPConnector

logger = logging.getLogger(__name__)


class HttpClientSession:
    """
    This class is used to interface with the HTTP server.
    """

    def __init__(
        self,
        http_url: str,
        auth: Optional[BasicAuth] = None,
    ):
        """
        Constructs the HTTP client.

        :param http_url: URL to connect to the HTTP server.
        :param auth: Authentication to connect to the server.
        :raises Exception: If the URL is empty.
        """
        logger.debug("Create Http client")
        if not http_url:
            raise Exception("Http URL is invalid")
        self._url: str = http_url
        #: Server authentication
        self._auth: Optional[BasicAuth] = auth
        #: HTTP session
        self._session: Optional[ClientSession] = None
        logger.debug("Http client created")

    async def __aenter__(self):
        """
        Create the HTTP session used to send requests.
        """
        self._session = ClientSession(
            auth=self._auth,
            raise_for_status=True,
            connector=TCPConnector(limit=50),
        )
        return self

    async def __aexit__(self, *err):
        """
        Close the HTTP session.
        """
        await self._session.close()
        self._session = None

    async def get(self, suffix_url: str, headers: dict, query: Any = None):
        """
        Send a GET request.

        :param suffix_url: Last part of the URL containing the request.
        :param headers: Headers for the request.
        :param query: Query parameters of the request.
        :return: Response.
        """
        async with self._session.get(
            url=self._url + suffix_url,
            headers=headers,
            params=query,
        ) as response:
            return await response.json()


class HttpClient:
    """
    This class is used to interface with the HTTP server.
    """

    def __init__(
        self,
        http_url: str,
        auth: Optional[BasicAuth] = None,
    ):
        """
        Constructs the HTTP client.

        :param http_url: URL to connect to the HTTP server.
        :param auth: Authentication to connect to the server.
        :raises Exception: If the URL is empty.
        """
        logger.debug("Create Http client")
        #: HTTP session
        self._session: HttpClientSession = HttpClientSession(http_url, auth)
        logger.debug("Http client created")

    async def get(self, suffix_url: str, headers: dict, query: Any = None):
        """
        Send a GET request.

        :param suffix_url: Last part of the URL containing the request.
        :param headers: Headers for the request.
        :param query: Query parameters of the request.
        :return: Response.
        """
        async with self._session as session:
            return await session.get(suffix_url, headers, query)
To be more efficient, I would like to be able to reuse an HTTP session. That is why I created the session class: the caller can use the async with syntax to share one session, and if that is not needed, the caller can call the HttpClient method directly, which creates a dedicated session.
So it is convenient because I can write:
http_client = HttpClient(...)
await http_client.get(...)
or
http_client = HttpClient(...)
async with http_client as http_session:
    await http_session.get(...)
    await http_session.get(...)
So, great, it works. But now my issue is that I would like users of the service classes to also be able to reuse a session or not. I'm a bit stuck, but my intention is to have this syntax as well:
client = ServiceClient(...)
await client.do_something(...)  # do_something will call http_client.get(...)
or
client = ServiceClient(...)
async with client as session:
    await session.do_something(...)       # do_something will call http_session.get(...)
    await session.do_something_else(...)  # do_something_else will reuse http_session
but I don't want to write this every time:
client = ServiceClient(...)
async with client as session:
    await session.do_something(...)  # session is not reused, so I don't want the async with boilerplate
I have tried to define an __aenter__ method, but I haven't found an elegant way to avoid code duplication.
Do you have any ideas?
I have tried to apply the same pattern used in the HttpClient and HttpClientSession classes to the service classes, but I haven't succeeded in having ServiceClient call HttpClient and ServiceClientSession call HttpClientSession.
And in fact, I'm not even sure this is the best pattern for it.
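For illustration, here is a minimal sketch of what that nested pairing might look like, building on the HttpClient and HttpClientSession classes above (ServiceClient, ServiceClientSession, do_something and the /something endpoint are hypothetical names, not code from the project):

class ServiceClientSession:
    """Service calls that reuse one already-open HTTP session."""

    def __init__(self, http_session: HttpClientSession):
        self._http_session = http_session

    async def do_something(self):
        # Reuses the session opened by ServiceClient.__aenter__.
        return await self._http_session.get("/something", headers={})


class ServiceClient:
    """Service calls that open a dedicated session per request, unless entered as a context manager."""

    def __init__(self, http_url: str, auth: Optional[BasicAuth] = None):
        self._http_url = http_url
        self._auth = auth
        self._http_client = HttpClient(http_url, auth)
        self._http_session: Optional[HttpClientSession] = None

    async def do_something(self):
        # One-shot call: HttpClient opens and closes its own session.
        return await self._http_client.get("/something", headers={})

    async def __aenter__(self):
        self._http_session = HttpClientSession(self._http_url, self._auth)
        await self._http_session.__aenter__()
        return ServiceClientSession(self._http_session)

    async def __aexit__(self, *err):
        await self._http_session.__aexit__(*err)
        self._http_session = None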

I have found one solution.
For the HttpClient, I refactored to remove the HttpClientSession class and manage an optional session instead:
class HttpClient:
    """
    This class is used to interface with the HTTP server.
    """

    def __init__(
        self,
        http_url: str,
        auth: Optional[BasicAuth] = None,
    ):
        """
        Constructs the HTTP client.

        :param http_url: URL to connect to the HTTP server.
        :param auth: Authentication to connect to the server.
        :raises Exception: If the URL is empty.
        """
        logger.debug("Create Http client")
        if not http_url:
            raise Exception("Http URL is invalid")
        self._url: str = http_url
        #: Server authentication
        self._auth: Optional[BasicAuth] = auth
        #: HTTP session
        self._session: Optional[ClientSession] = None
        logger.debug("Http client created")

    async def __aenter__(self):
        """
        Create the HTTP session used to send requests.
        """
        self._session = ClientSession(
            auth=self._auth,
            raise_for_status=True,
            connector=TCPConnector(limit=50),
        )
        return self

    async def __aexit__(self, *err):
        """
        Close the HTTP session.
        """
        await self._session.close()
        self._session = None

    async def get(self, suffix_url: str, headers: dict, query: Any = None):
        """
        Send a GET request.

        :param suffix_url: Last part of the URL containing the request.
        :param headers: Headers for the request.
        :param query: Query parameters of the request.
        :return: Response.
        """
        if self._session:
            async with self._session.get(
                url=self._url + suffix_url,
                headers=headers,
                params=query,
            ) as response:
                return await response.json()
        else:
            async with ClientSession(
                auth=self._auth,
                raise_for_status=True,
                connector=TCPConnector(limit=50),
            ) as session:
                async with session.get(
                    url=self._url + suffix_url,
                    headers=headers,
                    params=query,
                ) as response:
                    return await response.json()
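With this refactor, both calling styles work against the same object. A minimal usage sketch, assuming it runs inside a coroutine and that the URL and endpoint names are placeholders:

# One-shot call: a temporary ClientSession is created and closed for this request.
http_client = HttpClient("http://example.org")
data = await http_client.get("/endpoint", headers={})

# Reused session: a single ClientSession serves every call inside the block.
async with http_client as session:
    first = await session.get("/endpoint", headers={})
    second = await session.get("/other", headers={})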
And in the service class, I have also defined the methods to support the async with syntax:
async def __aenter__(self):
    """
    Create session to send requests.
    """
    await self._http_client.__aenter__()
    return self

async def __aexit__(self, *err):
    """
    Close session.
    """
    await self._http_client.__aexit__(*err)
So it works, but I don't know if there is a better way to do it, because I'm not a fan of the if-else statement.
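One possible way to keep that choice in a single place (a sketch building on the refactored HttpClient above; _ensure_session is a hypothetical helper name, not part of the original code) is to wrap the "shared session or temporary session" decision in a contextlib.asynccontextmanager helper, so request methods such as get no longer duplicate the request code:

from contextlib import asynccontextmanager

from aiohttp import ClientSession, TCPConnector


class HttpClient:
    def __init__(self, http_url, auth=None):
        self._url = http_url
        self._auth = auth
        self._session = None

    # __aenter__ and __aexit__ stay exactly as in the refactored class above.

    @asynccontextmanager
    async def _ensure_session(self):
        """Yield the shared session if one is open, otherwise a temporary one."""
        if self._session:
            yield self._session
        else:
            async with ClientSession(
                auth=self._auth,
                raise_for_status=True,
                connector=TCPConnector(limit=50),
            ) as session:
                yield session

    async def get(self, suffix_url: str, headers: dict, query=None):
        async with self._ensure_session() as session:
            async with session.get(
                url=self._url + suffix_url,
                headers=headers,
                params=query,
            ) as response:
                return await response.json()

The if-else still exists, but only once, and every new request method (get, post, and so on) can reuse the same helper.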

Related

Cookies not being sent, BlackSheep, Python

In BlackSheep, there is the following POST request (in the Login(Controller) class):
@post('/login')
async def login(self, request: Request) -> Response:
    data = await request.form()
    response = self.redirect('/')
    response.set_cookie(Cookie('log', 'log'))
    return response
and this GET, which receives the redirect (in the Home(Controller) class):
@get('/')
def index(self, request: Request):
    print(request.cookies.values())
    return self.view('forums')
The console outputs dict_values([]).
Why aren't the cookies being sent? I also searched the documentation, but found nothing.

Python Aiohttp: cookies behaviour on same domain

Hopefully this is not too stupid a question, but I am having trouble with aiohttp's cookie processing.
Aiohttp's CookieJar class mentions that it implements cookie storage adhering to RFC 6265, which states that:
cookies for a given host are shared across all the ports on that host
Cookies do not provide isolation by port. If a cookie is readable by a service running on one port, the cookie is also readable by a service running on another port of the same server.
But if I create two aiohttp servers, one that makes you "login" and gives you a cookie back, and another one with an endpoint that expects you to have a cookie, both hosted on localhost (on two different ports, I guess), the cookie will not be processed.
Here's a set of 4 tests using aiohttp, pytest, and pytest-aiohttp to explain:
import functools

import pytest
from aiohttp import web

pytestmark = pytest.mark.asyncio


def attach_session(f):
    @functools.wraps(f)
    async def wrapper(request: web.Request):
        session_id = request.cookies.get("testcookie")
        request["mysession"] = session_id
        response = await f(request)
        response.set_cookie("testcookie", session_id)
        return response

    return wrapper


def is_logged_in(f):
    @functools.wraps(f)
    @attach_session
    async def wrapper(request: web.Request):
        session = request["mysession"]
        if not session:
            raise web.HTTPUnauthorized
        return await f(request)

    return wrapper


async def login(_: web.Request):
    response = web.Response()
    response.set_cookie("testcookie", "somerandomstring")
    return response


@is_logged_in
async def some_endpoint(request: web.Request):
    return web.Response(text="sweet")


@pytest.fixture
def auth_client(event_loop, aiohttp_client):
    app = web.Application()
    app.router.add_post("/login", login)
    return event_loop.run_until_complete(aiohttp_client(app))


@pytest.fixture
def core_client(event_loop, aiohttp_client):
    app = web.Application()
    app.router.add_get("/some_endpoint", some_endpoint)
    return event_loop.run_until_complete(aiohttp_client(app))


async def test_login(auth_client):
    resp = await auth_client.post("/login")
    assert resp.status == 200
    assert resp.cookies.get("testcookie").value == "somerandomstring"


async def test_some_endpoint_anonymous(core_client):
    resp = await core_client.get("/some_endpoint")
    assert resp.status == 401


async def test_some_endpoint_as_logged_in(auth_client, core_client):
    resp1 = await auth_client.post("/login")
    resp2 = await core_client.get("/some_endpoint", cookies=resp1.cookies)
    assert resp2.status == 401


async def test_some_endpoint_as_logged_in_again(auth_client, core_client):
    resp1 = await auth_client.post("/login")
    _cookie = list(resp1.cookies.values())[0]
    resp2 = await core_client.get(
        "/some_endpoint", cookies={_cookie.key: _cookie.value}
    )
    assert resp2.status == 200
But from my understanding, the test_some_endpoint_as_logged_in test should work. Why does it return 401, while the same thing with the cookie sent as a dict returns 200?
I think the correct way of sharing the cookies between clients would be loading the SimpleCookie object of resp1 into the core_client.session.cookie_jar.
Changing the code of test_some_endpoint_as_logged_in as follows should fix it:
async def test_some_endpoint_as_logged_in(auth_client, core_client):
    resp1 = await auth_client.post("/login")
    core_client.session.cookie_jar.update_cookies(resp1.cookies)
    resp2 = await core_client.get("/some_endpoint")
    assert resp2.status == 401
Cookie data is kept in the session object; since auth_client and core_client are different sessions, each with its own cookie data, cookies are not shared between them. It is comparable to using a different browser for each, each with its own cookie_jar.
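To make that analogy concrete, here is a sketch (not part of the original tests; the ports and paths are hypothetical) of two plain aiohttp sessions that do share cookies because they are given the same jar. Note unsafe=True, which lets the jar keep cookies set by an IP host such as 127.0.0.1:

import asyncio

import aiohttp


async def main():
    # One jar shared by both sessions; unsafe=True lets it keep cookies
    # set by an IP host such as 127.0.0.1.
    jar = aiohttp.CookieJar(unsafe=True)
    async with aiohttp.ClientSession(cookie_jar=jar) as auth_session, \
            aiohttp.ClientSession(cookie_jar=jar) as core_session:
        await auth_session.post("http://127.0.0.1:8001/login")
        # The cookie stored by the first call is sent automatically here.
        await core_session.get("http://127.0.0.1:8002/some_endpoint")


asyncio.run(main())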

Make Bulk Requests API Client Programming Pattern / Design

I am writing an API client for a REST API that allows requests to be batched together and sent as one request.
I am struggling to figure out how to structure the client. I've come up with two designs so far (pseudo-Python code):
Async Method
When calls to the API are made (make_api_request), the method waits for the bulk request to be made. Once the bulk request is made by calling the resolve coroutine, the request coroutine releases control back to the make_api_request method, which processes the response for that specific request and returns it.
import asyncio


class FakeAPIClient:
    def __init__(self):
        pass

    async def make_api_request(self):
        """
        Returns the response after the request is made.
        """
        resp = await self.request()
        # response processing
        return resp

    async def request(self):
        """
        I block until the bulk request is made.
        Once it is made, I return the response from the single request.
        """
        return "RESPONSE AFTER DONE BLOCKING"

    async def resolve(self):
        """
        Make the bulk request, release the make_api_request calls and return the associated responses.
        """
        return None


async def example():
    api = FakeAPIClient()
    future1 = api.make_api_request()
    future2 = api.make_api_request()
    future3 = api.make_api_request()
    # 3 requests are sent in bulk
    response1, response2, response3, _ = await asyncio.gather(
        future1,
        future2,
        future3,
        api.resolve(),
    )


asyncio.run(example())
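To make the first design more concrete, the "block until the bulk request is made" behaviour can be implemented by having make_api_request park an asyncio.Future and await it, while resolve performs the (stubbed) bulk call and fulfils every pending future. This is a sketch with assumed names, not the poster's code:

import asyncio


class BatchingClient:
    """Sketch of the async design: requests wait on futures until resolve() runs."""

    def __init__(self):
        self._pending = []

    async def make_api_request(self, payload):
        # Park this request until resolve() performs the bulk call.
        future = asyncio.get_running_loop().create_future()
        self._pending.append((payload, future))
        response = await future
        # Per-request response processing would go here.
        return response

    async def resolve(self):
        pending, self._pending = self._pending, []
        # Stand-in for the single bulk HTTP request over all queued payloads.
        bulk_responses = [f"response for {payload}" for payload, _ in pending]
        for (_, future), response in zip(pending, bulk_responses):
            future.set_result(response)


async def example():
    api = BatchingClient()
    # resolve() is listed last, so the three requests register their futures first.
    results = await asyncio.gather(
        api.make_api_request("a"),
        api.make_api_request("b"),
        api.make_api_request("c"),
        api.resolve(),
    )
    print(results[:3])  # the three per-request responses


asyncio.run(example())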
Lookup method
When calls to the API are made (make_api_request), a lookup ID is returned and the request is put into storage. When the resolve method is called, the requests in storage are sent as a bulk request, and an object is returned that can be used to find each lookup ID's corresponding response.
class FakeAPIClient:
    def __init__(self):
        pass

    def make_api_request(self):
        """
        Adds the request to a queue of requests that will be resolved when the resolve method is called.
        Also provides a callback for post-request processing.
        Returns a unique id for the request.
        """
        return "UNIQUE ID"

    def resolve(self):
        """
        Makes the bulk request.
        Takes the responses and associates them with the request id.
        Calls the callback associated with the response for post-request processing.
        Returns an object with a method that returns the responses when provided the request id.
        """
        return "RESPONSE LOOKUP"


api = FakeAPIClient()
lookup_id1 = api.make_api_request()
lookup_id2 = api.make_api_request()
lookup_id3 = api.make_api_request()

lookup_object = api.resolve()

response1 = lookup_object.get(lookup_id1)
response2 = lookup_object.get(lookup_id2)
response3 = lookup_object.get(lookup_id3)
I don't really love either of these solutions, but I can't think of any alternatives. I'd assume there are known patterns for solving this problem, what are they?

How to access Request body in FastAPI class based view

I have the request object as a class-level dependency, as shown here, to be able to use it in all routes within the class. The problem, however, is when I try to access the request body: it errors with a "Stream consumed" error.
Example code:
from typing import Optional

from fastapi import FastAPI, File, Request, UploadFile
from fastapi_utils.inferring_router import InferringRouter
from fastapi_utils.cbv import cbv

app = FastAPI()
router = InferringRouter()


@cbv(router)
class ExampleRouteClass:
    request: Request
    file: Optional[UploadFile] = File(None)
    type: Optional[str] = None

    @router.post("/example-route/")
    async def example_function(self):
        headers = self.request.headers  # this works like a charm
        data = await self.request.json()  # this errors with RuntimeError: Stream consumed
        return headers


app.include_router(router)
Example curl request:
curl -X POST 'http://example.com:port/example-route' \
  -H 'secret-key: supersecret' \
  -d '{"some_data": "data"}'
The problem was with the UploadFile, so as a solution I placed it in the route itself, as it is the only route in the class that uses it anyway.
Leaving this here for anyone who runs into this error:
class ExampleRouteClass:
    request: Request
    type: Optional[str] = None

    @router.post("/example-route/")
    async def example_function(self, file: Optional[UploadFile] = File(None)):
        headers = self.request.headers  # this works like a charm
        data = await self.request.json()  # this works now too
        return headers

How to access request object in router function using FastAPI?

I am new to the FastAPI framework, and I want to print out the request body. For example, in Django:
@api_view(['POST'])
def install_grandservice(req):
    print(req.body)
And in FastAPI:
@app.post('/install/grandservice')
async def login():
    # print out req
I tried to do it like this:
@app.post('/install/grandservice')
async def login(req):
    print(req.body)
But I received this error: 127.0.0.1:52192 - "POST /install/login HTTP/1.1" 422 Unprocessable Entity
Please help me :(
Here is an example that will print the content of the Request in FastAPI.
It will print the body of the request as JSON (if it is JSON-parsable), otherwise it prints the raw byte array.
import logging

from fastapi import FastAPI, Request

app = FastAPI()


async def print_request(request):
    print(f'request header : {dict(request.headers.items())}')
    print(f'request query params : {dict(request.query_params.items())}')
    try:
        print(f'request json : {await request.json()}')
    except Exception as err:
        # could not parse json
        print(f'request body : {await request.body()}')


@app.post("/printREQUEST")
async def create_file(request: Request):
    try:
        await print_request(request)
        return {"status": "OK"}
    except Exception as err:
        logging.error(f'could not print REQUEST: {err}')
        return {"status": "ERR"}
You can define a parameter with the Request type in the router function, as follows:
from fastapi import FastAPI, Request

app = FastAPI()


@app.post('/install/grandservice')
async def login(request: Request):
    print(request)
    return {"foo": "bar"}
This is also covered in the docs, under the "Use the Request object directly" section.
