Is this the right way to do dependency injection in Django? - python

I'm trying to inject dependencies into my Django view (controller?). Here's some background.
Normally, the urls.py file is what handles the routing. It is usually something like this:
urlpatterns = [
    path("", views.get_all_posts, name="get_all_posts"),
    path("<int:post_id>", views.get_post, name="get_post"),
    path("create", views.create_post, name="create_post"),
]
The problem with this is that once you get to create_post, for instance, you might have a dependency on a service that creates posts:
# views.py
...
def create_post(request):
    svc = PostCreationService()
    svc.create_post()
This kind of pattern is difficult to test. While I know python testing libraries have tools to mock this sort of thing, I'd rather inject the dependency into the view. Here's what I came up with.
A Controller class that has a static method, export(**deps), which takes in the dependencies as keyword arguments and returns a list of URL pattern objects:
class ApiController(object):
    @staticmethod
    def export(**deps):
        ctrl = ApiController(**deps)
        return [
            path("", ctrl.get_all_posts, name="get_all_posts"),
            path("<int:post_id>", ctrl.get_post, name="get_post"),
            path("create", ctrl.create_post, name="create_post"),
        ]

    def __init__(self, **deps):
        self.deps = deps

    def get_all_posts(self, request):
        pass

    ...
This looks janky, but I'm not aware of any other way to do what I'm trying to do. The controller needs to return a list of url patterns, and it also needs to take in a list of dependencies. Using the above technique, I can do this in urls.py:
urlpatterns = ApiController.export(foo_service=(lambda x: x))
I am now free to use foo_service in any of the methods of ApiController.
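To make that concrete, a view method can pull the service out of self.deps, and a test can pass a fake at export time. A rough sketch (post_service and FakePostService are hypothetical names, not part of the code above):
# inside ApiController (sketch)
def create_post(self, request):
    svc = self.deps["post_service"]
    svc.create_post()
    ...

# urls.py
urlpatterns = ApiController.export(post_service=PostCreationService())

# in a test module, wire in the fake instead
urlpatterns = ApiController.export(post_service=FakePostService())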
Note:
One alternative would be for the constructor to return the list of urls, but I don't see that as a huge improvement over this. In fact, it strikes me as being more confusing because the class constructor would return a list instead of an instance of the class.
Note 2:
I'm aware that python has mocking tools for mocking class members. Please don't suggest using them. I'd like to use DI as the way to control and manage dependencies.
Any ideas on what the best way to do this is?

Consider injecting using decorators:
from functools import wraps

class ServiceInjector:
    def __init__(self):
        self.deps = {}

    def register(self, name=None):
        def decorator(thing):
            """
            thing here can be class or function or anything really
            """
            if not name:
                if not hasattr(thing, "__name__"):
                    raise Exception("no name")
                thing_name = thing.__name__
            else:
                thing_name = name
            self.deps[thing_name] = thing
            return thing
        return decorator

    def inject(self, func):
        @wraps(func)
        def decorated(*args, **kwargs):
            new_args = args + (self.deps, )
            return func(*new_args, **kwargs)
        return decorated
# usage:
si = ServiceInjector()

# use func.__name__, registering func
@si.register()
def foo(*args):
    return sum(args)

# we can rename what it's registered as; here, the class is registered
# with the name `UpperCase` instead of the class name `UpperCaseRepresentation`
@si.register(name="UpperCase")
class UpperCaseRepresentation:
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return self.value.upper()

# register a float
si.register(name="PI")(3.141592653)

# inject into functions
@si.inject
def bar(a, b, c, _deps):  # the last positional argument receives the dependencies
    UpperCase, PI, foo = _deps['UpperCase'], _deps['PI'], _deps['foo']
    print(UpperCase('abc'))    # ABC
    print(PI)                  # 3.141592653
    print(foo(a, b, c, 4, 5))  # = 15

bar(1, 2, 3)

# inject into class methods
class Foo:
    @si.inject
    def my_method(self, a, b, _deps, kwarg1=30):
        return _deps['foo'](a, b, kwarg1)

print(Foo().my_method(1, 2, kwarg1=50))  # = 53
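To wire this into the question's Django setup, one possible arrangement (a sketch, not part of the answer above; the module layout and PostCreationService are assumptions) is to register the service where it is defined and inject it into the view that urls.py already points at:
# services.py (hypothetical layout)
si = ServiceInjector()

@si.register(name="post_creation")
class PostCreationService:
    def create_post(self):
        ...

# views.py
from django.http import HttpResponse
from .services import si

@si.inject
def create_post(request, _deps):
    # the deps dict arrives as the last positional argument
    svc = _deps["post_creation"]()
    svc.create_post()
    return HttpResponse(status=201)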

You could take a look at https://github.com/ets-labs/python-dependency-injector, but that is a pretty big setup.
You could also create something small like a Service factory
# services.py
class ServiceFactory:
    def __init__(self):
        self.__services = {}

    def register(self, name, service_class):
        # Maybe add some validation
        self.__services[name] = service_class

    def create(self, name, *args, **kwargs):
        # Maybe add some error handling or fallbacks
        return self.__services[name](*args, **kwargs)

factory = ServiceFactory()

# In your settings.py, for example
from services import factory
factory.register('post_creation', PostCreationService)
# Or maybe in AppConfig.ready do an auto_load that loops over all apps and reads their services.py

# In your views.py
from services import factory

def create_post(request):
    svc = factory.create('post_creation')
    svc.create_post()

# In your tests.py
from services import factory

def setUp(self):
    factory.register('post_creation', FakePostCreationService)
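The AppConfig.ready idea mentioned in the comment could be sketched like this (an assumption, not part of the answer: each app ships a services.py whose import registers its classes with the shared factory):
# myapp/apps.py (sketch)
from django.apps import AppConfig

class MyAppConfig(AppConfig):
    name = "myapp"

    def ready(self):
        # Importing the module runs its factory.register(...) calls.
        from . import services  # noqa: F401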

While reading Dependency Injection Principles, Practices, and Patterns and trying to apply the examples to a django app I came up with the following:
# views.py
class IndexView(View):
    # Must include this to bypass django's validation
    product_service: IProductService = None

    # Init method not necessary but more explicit
    def __init__(self, product_service: IProductService):
        self.product_service = product_service

    def get(self, request):
        self.product_service.do_stuff()
        ...

# urls.py
# Construct dependencies. I guess this is the closest to the entry point we can get
# with Django.
repo = DjangoProductRepository()
product_service = ProductService(repo)

urlpatterns = [
    path('admin/', admin.site.urls),
    path("",
         IndexView.as_view(product_service=product_service),
         name="index"),
]
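A side benefit is that tests can compose the view with a fake at the same point; a sketch (FakeProductService and its flag are hypothetical test doubles):
# tests.py (sketch)
from django.test import RequestFactory

def test_index_uses_injected_service():
    fake = FakeProductService()
    view = IndexView.as_view(product_service=fake)
    view(RequestFactory().get("/"))
    assert fake.do_stuff_called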

This is just an updated version of rabbit.aaron's reply above. My idea is to be able to specify which dependencies to inject instead of getting a dictionary with all registered dependencies.
from functools import wraps

class ServiceInjector:
    deps = {}

    def register(self, name=None):
        def decorator(thing):
            """
            thing here can be class or function or anything really
            """
            if not name:
                if not hasattr(thing, '__name__'):
                    raise Exception('no name')
                thing_name = thing.__name__
            else:
                thing_name = name
            self.__class__.deps[thing_name] = thing
            return thing
        return decorator

    class inject:
        def __init__(self, *args):
            self.selected_deps = args

        def __call__(self, func):
            @wraps(func)
            def decorated(*args, **kwargs):
                selected_deps = {k: v for k, v in ServiceInjector.deps.items() if k in self.selected_deps}
                new_kwargs = {**kwargs, **selected_deps}
                return func(*args, **new_kwargs)
            return decorated
Usage:
si = ServiceInjector()

# use func.__name__, registering func
@si.register()
def foo(*args):
    return sum(args)

Custom naming still works:
@si.register(name='uppercase')
class UpperCaseRepresentation:
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return self.value.upper()

Register a float:
si.register(name="PI")(3.141592653)

Inject into functions:
@si.inject('foo', 'PI', 'uppercase')
def bar(a, b, c, uppercase: UpperCaseRepresentation, **kwargs):
    """
    You can take selected dependencies as keyword arguments (with type-hint
    annotations) and pick the rest out of **kwargs.
    """
    PI, foo = kwargs['PI'], kwargs['foo']
    print(uppercase('abc'))    # ABC
    print(PI)                  # 3.141592653
    print(foo(a, b, c, 4, 5))  # = 15

bar(1, 2, 3)

Inject into class methods:
class Bar:
    @si.inject('foo')
    def my_method(self, a, b, foo, kwarg1=30):
        return foo(a, b, kwarg1)

print(Bar().my_method(1, 2, kwarg1=50))  # = 53
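Because the lookup into ServiceInjector.deps happens at call time, a test can simply overwrite a registration with a fake before exercising the code under test; a sketch (the lambda stands in for a real fake):
# in a test (sketch)
si.register(name='foo')(lambda *args: 0)  # replace the real 'foo'
print(Bar().my_method(1, 2))              # = 0, the fake was injected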

You could go the flask route and export a class instance with a property that initializes and caches the service on first access. E.g:
# service.py
def default_factory():
    pass

class ServiceProvider:
    _instance = None

    def __init__(self, create_instance=default_factory):
        self.create_instance = create_instance

    @property
    def service(self):
        if self._instance:
            return self._instance
        self._instance = self.create_instance()
        return self._instance

service_provider = ServiceProvider()

# views.py
from .service import service_provider

def view(request):
    service_provider.service.do_stuff()
    # etc.
This has the advantages of being easy to mock and not having any magic.
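Swapping the service in a test then comes down to replacing the factory and clearing the cached instance; a sketch (StubService is a hypothetical test double):
# in a test (sketch)
from .service import service_provider

def setUp(self):
    service_provider.create_instance = StubService
    service_provider._instance = None  # drop any previously cached instance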

The most boring solution I could come up with involves using class variables:
# Module services.post_service
def default_create_post():
    return "foo"

class Provider:
    create_post = default_create_post
Then you could import and use it normally in a view or elsewhere:
from services import post_service

post_service.Provider.create_post()
# Should return "foo"
And when testing it could be swapped out before being called:
from django.test import TestCase
from services import post_service
from unittest.mock import patch

class MyTestCase(TestCase):
    @patch('services.post_service.default_create_post')
    def test_some_view(self, mock_create_post):
        mock_create_post.return_value = "bar"
        post_service.Provider.create_post = mock_create_post
        # Now calling post_service.Provider.create_post() should just return "bar"
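Since the swap mutates module-level state, it is worth restoring the original so other tests see the real implementation; a sketch using addCleanup:
class MyTestCase(TestCase):
    def test_some_view(self):
        original = post_service.Provider.create_post
        self.addCleanup(setattr, post_service.Provider, "create_post", original)
        post_service.Provider.create_post = lambda: "bar"
        # ... exercise the view ...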

Related

How do I handle dependency injection in Django?

I am currently trying to manage dependency injection in a django application. I found this thread (Is this the right way to do dependency injection in Django?) and tried the following simple example based on what I found there.
/dependencyInjection/serviceInjector.py
from functools import wraps
class ServiceInjector:
def __init__(self):
self.deps = {}
def register(self, name=None):
name = name
def decorator(thing):
"""
thing here can be class or function or anything really
"""
if not name:
if not hasattr(thing, "__name__"):
raise Exception("no name")
thing_name = thing.__name__ #Set the name of the dependency to the name of the class
else:
thing_name = name
self.deps[thing_name] = thing
return thing
return decorator
def inject(self, func):
#wraps(func)
def decorated(*args, **kwargs):
new_args = args + (self.deps, )
return func(*new_args, **kwargs)
return decorated
/__init__.py
from backend.dependencyInjection.serviceInjector import ServiceInjector
si = ServiceInjector()
/core/articleModule.py
from backend.core.IarticleModule import IarticleModule
from models import Article
from backend.__init__ import si

@si.register()
class articleModule(IarticleModule):
    def getArticleByLioId(self, lioId: str) -> Article:
        return Article.objects.get(lioId=lioId)
/services/articleManagementService.py
from backend.services.IarticleManagementService import IarticleManagementService
from backend.models import Article
from backend.__init__ import si

@si.register()
class articleManagementService(IarticleManagementService):
    @si.inject
    def __init__(self, _deps):
        articleModule = _deps['articleModule']
        self._articleModule = articleModule()

    def getArticleByLioId(self, lioId: str) -> Article:
        return self._articleModule.getArticleByLioId(lioId)
/views.py
from backend.__init__ import si

class article(View):
    @si.inject
    def __init__(self, _deps):
        self._articleManagementService = _deps['articleManagementService']

    def get(self, request, articleId):
        article = self._articleManagementService.getArticleByLioId(articleId)
        return article
When running this code I get the following error:
articleManagementService.getArticleByLioId() missing 1 required positional argument: 'lioId'
It seems to me like the articleManagementService is getting an uninstantiated version of the articleModule class. Is there any way I could solve this in order to get this idea to work nicely?
Sorry for the long question, I tried to only include the necessary parts of the code.

How can you solve this in Python? Automatically supply an argument to a static method when called from a managing class

I have some classes FooA and FooB which are basically a collection of "static" methods. They operate on data - let's say it is a DataItem object:
# Base class with common behavior
class FooBase:
    @classmethod
    def method1(cls, arg, data: DataItem):
        # res = ...
        return res

    @classmethod
    def method2(cls, arg1, arg2, data: DataItem):
        # res = ...  # using method1
        return res

# specialized classes
class FooA(FooBase):
    # define extra methods
    pass

class FooB(FooBase):
    # define extra methods
    pass

# usage 1: as "static methods"
res = FooA.method1(arg, data)
res2 = FooB.method2(args, data)
Now, I'd like to use these classes as attributes of a "managing" class (MyApp) which also has access to a datasource and should implicitly supply DataItems to the static methods of FooA and FooB. Moreover, the datasource supplies a list of DataItem objects.
# usage 2: as part of an "App" class
# here, the "data" argument should be supplied implicitly by MyApp
# also: MyApp contains a list of "data" objects
class MyApp:
    def __init__(self, datasrc):
        self.datasrc = datasrc

    # this could be a generator
    def get_data(self, key) -> List[DataItem]:
        return self.datasrc.get_data(key)

    # FooA, FooB as class / instance level attributes, descriptors, ???

# usage
my_app = MyApp("datasrc")
res_list = my_app.foo_a.method1(arg)  # foo_a is a FooA obj, "data" arg is supplied automatically

# optionally, but not necessarily, call it as a static attribute:
res = MyApp.foo_a.method1(arg, data)  # same as FooA.method1(arg, data)
I have tried different things but found no satisfactory solution.
So... I am not sure it can be done in a nice way; I thought about it and all approaches have serious drawbacks. One of the problems is that we actually want a method that returns either a list or a single item depending on the input parameters, which is bad.
One way could be to store datasrc in FooBase, but it violates the SRP:
class FooBase:
    def __init__(self, datasrc):
        FooBase.datasrc = datasrc

    @classmethod
    def method1(cls, arg, data=None):
        if data is None:
            return [cls.method1(arg, d) for d in cls.datasrc]
        return data
Or use isinstance
@classmethod
def method1(cls, arg, data):
    if isinstance(data, list):
        return [cls.method1(arg, d) for d in data]
    return data
But it forces us to adjust every method (which could be done with a decorator or metaclass).
Another way could be to use some intermediate layer:
def decorator(datasrc):
    def wrapper(foo):
        def f(*args, **kwargs):
            # We could catch TypeError here to serve case when data is passed
            return [foo(*args, **kwargs, data=data) for data in datasrc]
        return f
    return wrapper

class FooAdapter:
    def __init__(self, datasrc, foo_cls):
        self.datasrc = datasrc
        methods = [
            getattr(foo_cls, m)
            for m in dir(foo_cls)
            if callable(getattr(foo_cls, m)) and not m.startswith("__")
        ]  # all methods of our Foo class
        for method in methods:
            setattr(self, method.__name__, decorator(datasrc)(method))

class MyApp:
    def __init__(self, datasrc):
        self.datasrc = datasrc
        self.foo_a = FooAdapter(datasrc, FooA)
        self.foo_b = FooAdapter(datasrc, FooB)
But a solution with dynamically added functions breaks IDE support.
The cleanest solution, imo, could be to have an Enum for the Foo methods and an Enum for the Foo classes; then you could write code like this in MyApp:
def get_bulk(self, m: MethodEnum, f: FooEnum, *args):
    return [getattr(enum_to_cls_mapping[f], m)(*args, data=d) for d in self.datasrc]
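A fleshed-out version of that idea might look like this (a sketch; the enum members and the mapping are assumptions, not part of the answer above):
from enum import Enum

class FooEnum(Enum):
    A = "foo_a"
    B = "foo_b"

class MethodEnum(Enum):
    METHOD1 = "method1"
    METHOD2 = "method2"

enum_to_cls_mapping = {FooEnum.A: FooA, FooEnum.B: FooB}

class MyApp:
    def __init__(self, datasrc):
        self.datasrc = datasrc

    def get_bulk(self, m: MethodEnum, f: FooEnum, *args):
        # Look up the class, then call the named classmethod once per DataItem.
        cls = enum_to_cls_mapping[f]
        return [getattr(cls, m.value)(*args, data=d) for d in self.datasrc]

# usage (sketch): my_app.get_bulk(MethodEnum.METHOD1, FooEnum.A, arg)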

Apply function or property to multiple methods of same class

In the following example, the class MyExample reads a list of filenames which have underscores in their file names.
class MyExample(object):
    def __init__(self, location: str):
        self.location = location

    def get_file_names(self):
        # For simplicity, let's just return an example of the output
        return ["my_file_name_01.txt", "my_file_name_02.txt"]

    def get_file_name_number(self):
        # Idem as before.
        return ["file_name_01.txt", "file_name_02.txt"]

    def get_file_size(self):
        # It does not return a list of strings so the desired
        # property or function will not be applied here
        return [3800, 4000]
When the code is executed, the results are
my_object = MyExample("./a/random/path")
print(my_object.get_file_names())
print(my_object.get_file_name_number())
print(my_object.get_file_size())
# The results:
['my_file_name_01.txt', 'my_file_name_02.txt']
['file_name_01.txt', 'file_name_02.txt']
[3800, 4000]
Now, I would like to find a way to apply a function or property (let's call it to_dots) that replaces the underscores with dots in the output of get_file_names() and get_file_name_number().
The final code should return something like
# Calling this function with .to_dots
my_object.get_file_names().to_dots
["my.file.name.01.txt", "my.file.name.02.txt"]  # <-- The desired output

# Calling this function with .to_dots
my_object.get_file_name_number().to_dots
["file.name.01.txt", "file.name.02.txt"]  # <-- The desired output

# Calling this one with .to_dots should fail
my_object.get_file_size().to_dots
AttributeError  # <-- The desired output ... or something similar
Is there a way to add the to_dots to the class MyExample in order to replace the underscores of some of the methods inside?
I am not very familiar with decorators but I suspect there could be a trick to do that. So far I have unsuccessfully tried with @property, but as far as I know there are many types of decorators... Or maybe it can be done without decorators and I am very lost.
Thank you.
You could do something like this:
from collections import UserList

class MyList(UserList):
    @property
    def to_dots(self):
        return [s.replace("_", ".") for s in self.data]

class MyExample(object):
    def __init__(self, location: str):
        self.location = location

    def get_file_names(self):
        return MyList(["my_file_name_01.txt", "my_file_name_02.txt"])
Result for
my_object = MyExample("./a/random/path")
print(my_object.get_file_names())
print(my_object.get_file_names().to_dots)
is
['my_file_name_01.txt', 'my_file_name_02.txt']
['my.file.name.01.txt', 'my.file.name.02.txt']
You can use a class decorator to apply a function decorator to the specified class methods.
from functools import wraps
import inspect

def dec_methods(decorator, *members):
    """Class decorator to apply the specified decorator to specified members of a class."""
    def dec_the_class(cls):
        for name, m in inspect.getmembers(cls, inspect.isfunction):
            if name in members:
                setattr(cls, name, decorator(m))
        return cls
    return dec_the_class

def to_dots(func):
    """Function decorator to replace '_' with dots in the list of strings returned."""
    @wraps(func)
    def wrapped(*args, **kwargs):
        results = func(*args, **kwargs)
        results = [result.replace('_', '.') for result in results]
        return results
    return wrapped
@dec_methods(to_dots, 'get_file_names', 'get_file_name_number')
class MyExample(object):
    def __init__(self, location: str):
        self.location = location

    def get_file_names(self):
        # For simplicity, let's just return an example of the output
        return ["my_file_name_01.txt", "my_file_name_02.txt"]

    def get_file_name_number(self):
        # Idem as before.
        return ["file_name_01.txt", "file_name_02.txt"]

    def get_file_size(self):
        # It does not return a list of strings so the desired
        # property or function will not be applied here
        return [3800, 4000]

my_object = MyExample("./a/random/path")
print(my_object.get_file_names())        # -> ['my.file.name.01.txt', 'my.file.name.02.txt']
print(my_object.get_file_name_number())  # -> ['file.name.01.txt', 'file.name.02.txt']
print(my_object.get_file_size())         # -> [3800, 4000]
Why not pass the required output format (dotted or not) as an optional argument to those methods?
class MyExample(object):
    def __init__(self, location: str):
        self.location = location

    @staticmethod
    def map_list(lst, dotted=False):
        return [s.replace("_", ".") for s in lst] if dotted else lst

    def get_file_names(self, dotted=False):
        return self.map_list(["my_file_name_01.txt", "my_file_name_02.txt"], dotted)

    def get_file_name_number(self, dotted=False):
        return self.map_list(["file_name_01.txt", "file_name_02.txt"], dotted)

    def get_file_size(self):
        return [3800, 4000]

my_object = MyExample("./a/random/path")
print(my_object.get_file_names())                   # Underscores untouched
print(my_object.get_file_name_number(dotted=True))  # Underscores replaced
print(my_object.get_file_size())
Define your own decorator:
def to_dots(func):
    def inner(self):
        original_return = func(self)
        return [item.replace('_', '.') for item in original_return]
    return inner
And use it as:
@to_dots
def get_file_names(self):
    ...

Extend a class without access to its __init__ method

I am trying to extend a class from a given library but I do not have access to the __init__ of the class, only to a function generating an instance. Something like
class A:
    pass

def return_an_A():
    return A()

class B(A):
    # How to instantiate B with only access to return_an_A?
    def extend_A(self):
        pass
How can I define the instantiation of class B?
Thanks for your help.
Update
As rightfully noticed, my example was poorly set up so here is, I hope, a better explanation of my real issue.
The original code I used was the following.
import gitlab

# Here gl.projects.get(project_name) is an instance of the class
# gitlab.v4.objects.Project
def project(url, private_token, project_name):
    with gitlab.Gitlab(url, private_token) as gl:
        return gl.projects.get(project_name)

# This implementation takes an instance of gitlab.v4.objects.Project
def list_files(project, commit_branch):
    current_files = []
    if not project.empty_repo:
        current_files = [
            f["path"]
            for f in project.repository_tree(ref=commit_branch)
        ]
    return current_files
I wanted to have a structure like
class MyProject:
    # What is missing here is a way to instantiate this like the project function does.
    # I don't want to pass a Project instance as a parameter and make it an
    # attribute; I would like to extend the Project class itself.
    def list_files(self, commit_branch):
        current_files = []
        # Note here that the attributes of
        # gitlab.v4.objects.Project are directly accessible
        if not self.empty_repo:
            current_files = [
                f["path"]
                for f in self.repository_tree(ref=commit_branch)
            ]
        return current_files
but I can't manage to find the right way to write the __init__.
I found a solution to my issue by using a delegation pattern.
import gitlab

class Project:
    def __init__(self, private_token, project_name, url):
        self.private_token = private_token
        self.project_name = project_name
        self.url = url

    # Delegate attribute lookups to the wrapped gitlab project when they are
    # not found on this class
    def __getattr__(self, attr):
        return getattr(self._project, attr)

    @property
    def _project(self):
        with gitlab.Gitlab(self.url, self.private_token) as gl:
            return gl.projects.get(self.project_name)

    def list_files(self, branch):
        current_files = []
        if not self._project.empty_repo:
            current_files = [
                f["path"]
                for f in self._project.repository_tree(
                    ref=branch, recursive=True, all=True
                )
                if f["type"] == "blob"
            ]
        return current_files
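Usage then mirrors the original project helper; a rough sketch with placeholder values:
# sketch: token, project name, and URL are placeholders
project = Project("<private_token>", "group/my-project", "https://gitlab.example.com")
print(project.list_files("main"))
print(project.empty_repo)  # delegated to the underlying gitlab.v4.objects.Project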

Is there a nice way to partially-bind class parameters in Python? [duplicate]

I want to create a class that behaves like collections.defaultdict, without having the usage code specify the factory. EG:
instead of
class Config(collections.defaultdict):
    pass
this:
Config = functools.partial(collections.defaultdict, list)
This almost works, but
isinstance(Config(), Config)
fails. I am betting this clue means there are more devious problems deeper in also. So is there a way to actually achieve this?
I also tried:
class Config(object):
    __init__ = functools.partial(collections.defaultdict, list)
I don't think there's a standard method to do it, but if you need it often, you can just put together your own small function:
import functools
import collections

def partialclass(cls, *args, **kwds):
    class NewCls(cls):
        __init__ = functools.partialmethod(cls.__init__, *args, **kwds)
    return NewCls

if __name__ == '__main__':
    Config = partialclass(collections.defaultdict, list)
    assert isinstance(Config(), Config)
I had a similar problem but also required instances of my partially applied class to be pickle-able. I thought I would share what I ended up with.
I adapted fjarri's answer by peeking at Python's own collections.namedtuple. The below function creates a named subclass that can be pickled.
from functools import partialmethod
import sys

def partialclass(name, cls, *args, **kwds):
    new_cls = type(name, (cls,), {
        '__init__': partialmethod(cls.__init__, *args, **kwds)
    })

    # The following is copied nearly verbatim from namedtuple's source.
    #
    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created. Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython).
    try:
        new_cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        pass

    return new_cls
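A quick check of the pickling claim could look like this (a sketch; Greeter and HelloGreeter are hypothetical names, and both are assumed to live at module level so pickle can find them by name):
import pickle

class Greeter:
    def __init__(self, greeting, name):
        self.message = f"{greeting}, {name}!"

HelloGreeter = partialclass('HelloGreeter', Greeter, 'Hello')

g = HelloGreeter('world')
print(pickle.loads(pickle.dumps(g)).message)  # Hello, world!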
At least in Python 3.8.5 it just works with functools.partial:
import functools

class Test:
    def __init__(self, foo):
        self.foo = foo

PartialClass = functools.partial(Test, 1)

instance = PartialClass()
instance.foo
If you actually need working explicit type checks via isinstance, you can simply create a not too trivial subclass:
class Config(collections.defaultdict):
    def __init__(self):  # no arguments here
        # call the defaultdict init with the list factory
        super(Config, self).__init__(list)
You'll have no-argument construction with the list factory and
isinstance(Config(), Config)
will work as well.
Could use *args and **kwargs:
class Foo:
    def __init__(self, a, b):
        self.a = a
        self.b = b

    def printy(self):
        print("a:", self.a, ", b:", self.b)

class Bar(Foo):
    def __init__(self, *args, **kwargs):
        return super().__init__(*args, b=123, **kwargs)

if __name__ == "__main__":
    bar = Bar(1)
    bar.printy()  # Prints: "a: 1 , b: 123"
