`__init__` decorator that handles args if empty - python

Let's say I have a class as such:
class Test:
    def __init__(self, a, b):
        self.a = a
        self.b = b
My goal is to create a decorator that handles populating the __init__ args when they are not passed, i.e.:
class Test:
    @autoinit
    def __init__(self, a, b):
        self.a = a
        self.b = b
where @autoinit is defined as such:
class autoinit:
    def __init__(self, data={"a": "test_a", "b": "test_b"}):
        self.data = data
    def __call__(self, func):
        decorator = self
        def wrapper(*args, **kwargs):
            print(decorator.data)
            func(self, **decorator.data)
            print(decorator.data)
        return wrapper
Thus, it will automatically assign the Test attributes to test_a, test_b respectively.
The ideal usage would be as such:
test = Test(a="test_z", b="test_x")
test.a == "test_z"
test.b == "test_x"
# however,
test = Test()
test.a == "test_a"
test.b == "test_b"
# but also,
test = Test(a="test_z")
test.a == "test_z"
test.b == "test_b"
The arguments of the Test class will always match the keys in the data dictionary.
Is this possible? What is the cleanest implementation?
Update:
The intended use is across many independent classes. For example, say I have a global config as such:
config = {
    "resourceA": {"a": "test_a", "b": "test_b"},
    "resourceB": {"name": "foo", "value": "bar"}
}
The goal would be for the decorator @autoinit(resource="resourceA") to use **config[resource] to populate all __init__ values for a given class.

Here's how I would write this:
def autoinit(**kwargs):
    if not kwargs:
        kwargs = {"a": "test_a", "b": "test_b"}  # some default
    def wrapper(f):
        def wrapped(*args, **overrides):
            # merge per call so the shared defaults dict is never mutated
            merged = {**kwargs, **overrides}
            return f(*args, **merged)
        return wrapped
    return wrapper
This allows an implementing class as described in your question:
class Test:
    @autoinit()
    def __init__(self, a, b):
        self.a = a
        self.b = b

t = Test()
assert t.a == 'test_a'
assert t.b == 'test_b'

t2 = Test(a='test_z')
assert t2.a == 'test_z'
assert t2.b == 'test_b'
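Addressing the update's resource-keyed variant: a minimal sketch along the same lines (the merge order and the module-level config lookup are assumptions, not part of the question's code):

config = {
    "resourceA": {"a": "test_a", "b": "test_b"},
    "resourceB": {"name": "foo", "value": "bar"},
}

def autoinit(resource):
    defaults = config[resource]  # defaults for this resource
    def wrapper(f):
        def wrapped(self, **overrides):
            # explicit kwargs win over the configured defaults
            return f(self, **{**defaults, **overrides})
        return wrapped
    return wrapper

class ResourceA:
    @autoinit(resource="resourceA")
    def __init__(self, a, b):
        self.a = a
        self.b = b

assert ResourceA().a == "test_a"
assert ResourceA(a="test_z").b == "test_b"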
With that all being said, consider instead using a mixin that teaches your class how to read from the configuration itself.
from abc import ABC

class ConfigurationDefault(ABC):
    @classmethod
    def with_config_defaults(cls, config, **kwargs):
        new_kwargs = {**config, **kwargs}
        return cls(**new_kwargs)

class Test(ConfigurationDefault):
    def __init__(self, a, b):
        self.a = a
        self.b = b

config = {'resources': {'a': 'test_a', 'b': 'test_b'}}
t = Test.with_config_defaults(config['resources'])
t2 = Test.with_config_defaults(config['resources'], a='test_z')
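A quick check of the resulting defaults and overrides (given the definitions above):

assert t.a == 'test_a' and t.b == 'test_b'
assert t2.a == 'test_z' and t2.b == 'test_b'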


How to patch a function of a global singleton to return a specified value given a specified input?

File c.py has a global variable COBJ, an instance of C (a Singleton):
class Singleton(type):
    _instances = {}
    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]

class C(metaclass=Singleton):
    def __init__(self, a, b):
        ...
    def get(self, x):  # To be mocked/patched
        return ...

COBJ = C(1, 'z')
And I have a file x.py which imports c.py and needs to be tested:
from c import COBJ

class X:  # to be tested
    def f(self, a):
        x = COBJ.get(a)
        return x + '-AddedInX.f'  # just a simple example here
How do I mock COBJ.get(a) for specific parameter inputs?
import x

def test_f():
    xobj = x.X()
    input = 'abc'
    # need to patch COBJ.get() to return '###' given 'abc'
    ...
    result = xobj.f(input)
    assert result == '###-AddedInX.f'
How do I patch COBJ.get() to return a specified value given a specified input? And is COBJ a pythonic way to create a global singleton object?
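For context, a common way to do this in tests, separate from the answer below, is unittest.mock.patch.object with a side_effect (a hedged sketch; the lookup table is an assumption):

from unittest import mock
import x

def test_f():
    replies = {'abc': '###'}  # input -> patched return value
    with mock.patch.object(x.COBJ, 'get', side_effect=lambda a: replies[a]):
        assert x.X().f('abc') == '###-AddedInX.f'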
The following program supports adding a patch to a function: when the function executes, it returns the value corresponding to the parameter it received. A function can have multiple patches, and there is also a method to remove a patch.
import inspect
from functools import wraps

class Patch:
    class _Empty:
        pass

    # Used to store the corresponding values.
    # Basic format: {obj_id: {func_name: {inp_v: ret_v}}}
    _patch_mapping = {}

    @classmethod
    def add_patch(cls, obj, f, inp_v, ret_v):
        """
        obj: The instance of the class to which the function belongs
        f: The function object that adds the patch
        inp_v: The parameter value entered when the function is executed
        ret_v: When the function receives a parameter value equal to `inp_v`, it will return `ret_v`
        """
        obj_id = id(obj)
        f = cls._wrapper(obj_id, f)
        cls._patch_mapping.setdefault(
            obj_id, {}
        ).setdefault(f.__name__, {})[inp_v] = ret_v
        return f

    @classmethod
    def remove_patch(cls, obj, f, inp_v=None):
        """Delete the patch; when `inp_v` is None, clear all the patches corresponding to the function."""
        obj_m = cls._patch_mapping.get(id(obj), {})
        if inp_v is None:
            obj_m.pop(f.__name__, None)
        else:
            obj_m.get(f.__name__, {}).pop(inp_v, None)

    @classmethod
    def _check(cls, func):
        """Check that the function takes exactly one positional argument."""
        sig = inspect.signature(func)
        assert len(sig.parameters) == 1
        for j in sig.parameters.values():
            assert j.kind in (
                inspect.Parameter.POSITIONAL_OR_KEYWORD,
                inspect.Parameter.POSITIONAL_ONLY
            )

    @classmethod
    def _wrapper(cls, obj_id, func):
        cls._check(func)
        func_name = func.__name__
        @wraps(func)
        def inner(param):
            nonlocal obj_id, func_name
            # If a patched value exists for this parameter, return it;
            # otherwise execute the original function.
            if (v := cls._patch_mapping.get(obj_id, {}).get(
                    func_name, {}
                ).get(param, cls._Empty)
            ) != cls._Empty:
                return v
            return func(param)
        return inner

class Singleton(type):
    _instances = {}

    def add_patch(self, f, inp_v, ret_v):
        self.__dict__[f.__name__] = Patch.add_patch(self, f, inp_v, ret_v)

    def remove_patch(self, f, inp_v=None):
        Patch.remove_patch(self, f, inp_v)

    def __new__(cls, name, bases, attrs):
        # Add the patch helpers as methods of the class being created.
        attrs["add_patch"] = cls.add_patch
        attrs["remove_patch"] = cls.remove_patch
        return super().__new__(cls, name, bases, attrs)

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]

class C(metaclass=Singleton):
    def __init__(self, a, b):
        # ...
        pass

    def get(self, x):  # To be mocked/patched
        return "get"

COBJ = C(1, 'z')
print(COBJ.get("a"))
COBJ.add_patch(COBJ.get, "a", 2)
print(COBJ.get("a"))

COBJ1 = C(1, 'z')
print(COBJ1.get("a"))
COBJ1.add_patch(COBJ1.get, "a", 7)
COBJ1.add_patch(COBJ1.get, "b", 8)
print(COBJ1.get("a"))
print(COBJ1.get("b"))
COBJ1.remove_patch(COBJ1.get)
print(COBJ1.get("a"))
print(COBJ1.get("b"))
Output:
get
2
2
7
8
get
get
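Tying this back to the original test (a sketch, assuming the Patch/Singleton machinery above lives in c.py alongside COBJ):

from c import COBJ
import x

def test_f():
    # route 'abc' to '###' for COBJ.get, then clean up
    COBJ.add_patch(COBJ.get, 'abc', '###')
    try:
        assert x.X().f('abc') == '###-AddedInX.f'
    finally:
        COBJ.remove_patch(COBJ.get)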

Wrap an arbitrary class with a new method dynamically

I have a class A.
I have another class B. Instances of class B should function exactly like instances of class A, with one addition: I want another method available, called special_method(self, args, kwargs).
So the following should work:
instance_A = classA(args, kwargs)
instance_B = classB(instance_A)
method_result = instance_B.special_method(args, kwargs)
How do I write class B to accomplish this?
Note: If I only wanted to do this for ONE class A, I could just have class B inherit from class A. But I want to be able to add special_method to classes C, D, E, F... etc.
So, you are describing a proxy object. Doing this for non-special methods is trivial in Python: you can use __getattr__:
In [1]: class A:
...: def foo(self):
...: return "A"
...:
In [2]: class B:
...: def __init__(self, instance):
...: self._instance = instance
...: def special_method(self, *args, **kwargs):
...: # do something special
...: return 42
...: def __getattr__(self, name):
...: return getattr(self._instance, name)
...:
In [3]: a = A()
In [4]: b = B(a)
In [5]: b.foo()
Out[5]: 'A'
In [6]: b.special_method()
Out[6]: 42
However, there is one caveat here: this won't work with special methods because special methods skip this part of attribute resolution and are directly looked up on the class __dict__.
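A quick illustration of that caveat, using the A and B from above (the __len__ on A is an assumption added for this demo):

class A:
    def foo(self):
        return "A"
    def __len__(self):
        return 3

class B:
    def __init__(self, instance):
        self._instance = instance
    def __getattr__(self, name):
        return getattr(self._instance, name)

b = B(A())
assert len(b._instance) == 3
try:
    len(b)  # __len__ is looked up on type(b), bypassing __getattr__
except TypeError as e:
    print(e)  # object of type 'B' has no len()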
As an alternative, you can simply add the method to all the classes that need it. Something like:
def special_method(self, *args, **kwargs):
    # do something special
    return 42

for klass in [A, C, D, E, F]:
    klass.special_method = special_method
Of course, this would affect all instances of these classes (since you are simply dynamically adding a method to the class).
If you really need special methods, your best bet would be to create a subclass, but you can do this dynamically with a simple helper function, e.g.:
def special_method(self, *args, **kwargs):
    # do something special
    return 42

_SPECIAL_MEMO = {}

def dynamic_mixin(klass, *init_args, **init_kwargs):
    if klass not in _SPECIAL_MEMO:
        child = type(f"{klass.__name__}Special", (klass,), {"special_method": special_method})
        _SPECIAL_MEMO[klass] = child
    return _SPECIAL_MEMO[klass](*init_args, **init_kwargs)

class Foo:
    def __init__(self, foo):
        self.foo = foo
    def __len__(self):
        return 88
    def bar(self):
        return self.foo * 2

special_foo = dynamic_mixin(Foo, 10)
print("calling len", len(special_foo))
print("calling bar", special_foo.bar())
print("calling special method", special_foo.special_method())
The above script prints:
calling len 88
calling bar 20
calling special method 42

Overriding parent's attributes with child's class attributes

I have a scheme of cooperative classes based on collections.abc. When I subclass them, I want to be able to define just a couple of class attributes that then become the default values at instantiation, like so:
class MyFancyClass:
    # Defines various attributes, as class attributes and/or in the
    # __init__ method
    def __init__(self, a=1, b=1):
        self.a = a
        self.b = b

class A(MyFancyClass):
    # Instances of A should have these values, even if they override
    # a value set in MyFancyClass's __init__ method:
    a = 2
    b = 2
    c = SomeHelperClass
Currently, in the __init__ of MyFancyClass, I do:
def __init__(self, *args, **kwargs):
    for k, v in vars(type(self)).items():
        if k.startswith("_"):
            continue
        if k not in kwargs:
            kwargs[k] = v
    super().__init__(*args, **kwargs)
That works fine, but if I make a class B that is a subclass of A, I lose the values defined for A, and I want to keep them (vars(type(self)) only looks at the most derived class's own __dict__, not the full MRO).
So, playing around, I got stuck here...
class InitExtras:
    def __init__(self, *args, **kwargs):
        for cls in type(self).__mro__:
            if cls == InitExtras:
                break
            for k, v in vars(cls).items():
                if k.startswith("_") or callable(v):
                    continue
                if k not in kwargs:
                    print(f"adding\n{k=}\n{v=}\n")
                    kwargs[k] = v
        super().__init__(*args, **kwargs)

class Base:
    def __init__(self, *args, **kwargs):
        print(f"{args = }")
        print(f"{kwargs = }")

class A(Base):
    def fun1(self):
        pass

class B(A):
    def fun2(self):
        pass

    @property
    def b(self):
        return self._b

    @b.setter
    def b(self, value):
        self._b = value

    def __init__(self, *args, b=23, b2=32, **kwargs):
        super().__init__(*args, **kwargs)
        self.b = b
        self.b2 = b2

class C(InitExtras, B):
    b = 42

class D(C):
    b2 = 420

class T:
    pass

class E(C):
    b2 = T

    def fun3(self):
        pass
This seems to do most of what I want, except that E().b2 is 32, not T (T is a class, so it is callable and gets filtered out). And if I remove the callable() filter, other things can get mixed in too, like extra methods one might define later to customize classes even further if needed (fun3 in the example). I don't want to have to write a new __init__ each time.
So my question is, how do I accomplish that?
I solved it by making a metaclass; to distinguish between the different kinds of class attributes, I limit it to properties only.
abc_recipes.py

from abc import ABCMeta, ABC, abstractmethod

class PropertyConfigMeta(ABCMeta):
    def __new__(mcls, name, bases, namespace, /, **kwargs):
        # list the properties that the new class would inherit
        properties = {p for bcls in bases
                      for cls in bcls.__mro__
                      for p, v in vars(cls).items()
                      if isinstance(v, property)
                      }
        # extract the attributes that would overwrite an inherited
        # property with a non-property value
        new_default = {}
        new_namespace = {}
        for k, v in namespace.items():
            if k in properties:
                if isinstance(v, property):
                    new_namespace[k] = v
                else:
                    new_default[k] = v
            else:
                new_namespace[k] = v
        cls = super().__new__(mcls, name, bases, new_namespace, **kwargs)
        if hasattr(cls, "_new_default"):
            cls._new_default = {**cls._new_default, **new_default}
        else:
            cls._new_default = new_default
        return cls

class PropertyConfig(metaclass=PropertyConfigMeta):
    """Cooperative class that transforms

    class A(SomeClass):
        a = 1
        b = 2

    into

    class A(SomeClass):
        def __init__(self, *arg, a=1, b=2, **karg):
            super().__init__(*arg, a=a, b=b, **karg)

    so long as a and b are defined as properties in SomeClass
    (or somewhere in the inheritance chain)

    class SomeClass:
        @property
        def a(self):
            ...
        @property
        def b(self):
            ...

    Use as

    class A(PropertyConfig, SomeClass):
        a = 1
        b = 2
    """
    def __init__(self, *arg, **kwargs):
        for k, v in self._new_default.items():
            if k not in kwargs:
                kwargs[k] = v
        super().__init__(*arg, **kwargs)

class ConfigClass(ABC):
    """Cooperative class that offers a default __repr__ method
    based on the abstract property .config"""

    @property
    @abstractmethod
    def config(self) -> dict:
        """configuration of this class"""
        return {}

    def __repr__(self):
        return f"{type(self).__name__}({', '.join(f'{k}={v!r}' for k, v in self.config.items())})"
Sample use:

import abc_recipes

class Base:
    def __init__(self, *arg, **karg):
        if arg:
            print(f"{arg=}")
        if karg:
            print(f"{karg=}")

class A(Base):
    pass

class B(abc_recipes.ConfigClass, A):
    def __init__(self, *a, b=23, b2=32, **k):
        super().__init__(*a, **k)
        self.b = b
        self.b2 = b2

    @property
    def b(self):
        "b attribute"
        # print("b getter")
        return self._b

    @b.setter
    def b(self, v):
        # print("b setter")
        self._b = v

    @property
    def b2(self):
        "b2 attribute"
        # print("b2 getter")
        return self._b2

    @b2.setter
    def b2(self, v):
        # print("b2 setter")
        self._b2 = v

    @property
    def config(self) -> dict:
        """configuration of this class"""
        res = super().config
        res.update(b=self.b, b2=self.b2)
        return res

class C(abc_recipes.PropertyConfig, B):
    b = 42

class D(C):
    b2 = 420

class T:
    pass

class E(C):
    b2 = T
    pi = 3.14

class F(E):
    @property
    def b2(self):
        # print("rewritten b2 getter")
        return "rewritten b2"

    @b2.setter
    def b2(self, value):
        # print("rewritten b2 setter")
        pass
Test:
>>> F()
F(b=42, b2='rewritten b2')
>>> E()
E(b=42, b2=<class '__main__.T'>)
>>> D()
D(b=42, b2=420)
>>> C()
C(b=42, b2=32)
>>> B()
B(b=23, b2=32)
>>> e=E()
>>> e.pi
3.14
>>> f=F()
>>> f.pi
3.14
>>>

How to get the class from which a method was called?

The get_calling_class function must pass the following tests by returning the class of the method that called the A.f method:
class A:
    def f(self): return get_calling_class()

class B(A):
    def g(self): return self.f()

class C(B):
    def h(self): return self.f()

c = C()
assert c.g() == B
assert c.h() == C
Walking the stack should give the answer; ideally, the answer would be found in the caller's stack frame. The problem is that stack frames only record function names ('f', 'g', 'h', etc.); any information about classes is lost. Trying to reverse-engineer the lost info by navigating the class hierarchy (in parallel with the stack frames) did not get me very far, and got complicated.
So, here is a different approach: inject the class info into the stack frame (e.g. with local variables), and read it from the called function.
import inspect

class A:
    def f(self):
        frame = inspect.currentframe()
        callerFrame = frame.f_back
        callerLocals = callerFrame.f_locals
        return callerLocals['cls']

class B(A):
    def g(self):
        cls = B
        return self.f()
    def f(self):
        cls = B
        return super().f()

class C(B):
    def h(self):
        cls = C
        return super(B, self).f()
    def f(self):
        cls = C
        return super().f()

c = C()
assert c.h() == C
assert c.g() == B
assert c.f() == B
Related:
get-fully-qualified-method-name-from-inspect-stack
Without modifying the definition of the subclasses: I added an "external" decorator to wrap the class methods (at least as a temporary solution).
import inspect

class Injector:
    def __init__(self, nameStr, valueStr):
        self.nameStr = nameStr
        self.valueStr = valueStr

    # Should inject directly in f's local scope / stack frame.
    # As is, it just adds another stack frame on top of f.
    def injectInLocals(self, f):
        def decorate(*args, **kwargs):
            exec(f'{self.nameStr} = {self.valueStr}')
            return f(*args, **kwargs)
        return decorate

class A:
    def f(self):
        frame = inspect.currentframe()
        callerDecoratorFrame = frame.f_back.f_back  # Note: twice
        callerDecoratorLocals = callerDecoratorFrame.f_locals
        return callerDecoratorLocals['cls']

class B(A):
    def g(self): return self.f()
    def f(self): return super().f()

class C(B):
    def h(self): return super(B, self).f()
    def f(self): return super().f()

bInjector = Injector('cls', B.__name__)
B.g = bInjector.injectInLocals(B.g)
B.f = bInjector.injectInLocals(B.f)

cInjector = Injector('cls', C.__name__)
C.h = cInjector.injectInLocals(C.h)
C.f = cInjector.injectInLocals(C.f)

c = C()
assert c.h() == C
assert c.g() == B
assert c.f() == B
I found this link very interesting (but didn't take advantage of metaclasses here): what-are-metaclasses-in-python
Maybe someone could even replace the function definitions* with functions whose code is a duplicate of the original, but with the added locals/information directly in their scope.
* Maybe after the class definitions have completed; maybe during class creation (using a metaclass).
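For completeness, a frame-walking sketch that passes the original tests without modifying the subclasses, by matching the caller's code object against the MRO (it assumes the caller is a regular bound method whose self local is the instance):

import inspect

def get_calling_class():
    # frame of the method that called A.f (two frames up from here)
    caller = inspect.currentframe().f_back.f_back
    code = caller.f_code
    obj = caller.f_locals.get('self')
    # find the class whose own __dict__ holds exactly that function
    for klass in type(obj).__mro__:
        func = vars(klass).get(code.co_name)
        if getattr(func, '__code__', None) is code:
            return klass
    return None

class A:
    def f(self): return get_calling_class()

class B(A):
    def g(self): return self.f()

class C(B):
    def h(self): return self.f()

c = C()
assert c.g() == B
assert c.h() == C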

Automatically create (and keep) an object when accessed

I would like to do something like this:
class A:
    def hello(self): print("Hello")

# I do not want to explicitly set up a:
a = A()
# a = A() -> I want this to happen automatically when I access a

# My first try is this:
def a():
    return A()
# Also, I do not want to call a as a function, a(): it must be an object
# and it must stay alive and initialized

a.hello()  # a is created, as an object of class A
a.hello()  # I do not want a second instantiation
How can I implement this? Properties? Cached properties? They are only for classes: a is a module-level object.
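One modern option, separate from the answers below, is a module-level __getattr__ (PEP 562, Python 3.7+): code that imports the module gets a lazily created, cached instance on first access of a. A sketch, assuming the class lives in its own module (the module name lazy_a is hypothetical):

# lazy_a.py
class A:
    def hello(self):
        print("Hello")

_a = None

def __getattr__(name):
    # called only for names the module does not already define
    global _a
    if name == "a":
        if _a is None:
            _a = A()  # created on first access, then cached
        return _a
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

# elsewhere:
# import lazy_a
# lazy_a.a.hello()  # instantiates A once
# lazy_a.a.hello()  # reuses the same instance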
Maybe something like this:
class A(object):
    def hello(self):
        print("Hello")

class LazyA(object):
    def __init__(self):
        self.instance = None
    def __getattr__(self, k):
        if self.instance is None:
            self.instance = A()
        return getattr(self.instance, k)

a = LazyA()

def lazyinit(cls):
    class p(object):
        def __init__(self, *args, **kws):
            self._init = lambda: cls(*args, **kws)
            self._obj = None
        def __getattr__(self, k):
            if not self._obj:
                self._obj = self._init()
            return getattr(self._obj, k)
    return p
Example:
@lazyinit
class A(object):
    def __init__(self, a, b):
        print("initializing...")
        self.x = a + b + 2
    def foo(self):
        return self.x

x = A(39, 1)
print(x)
print(x.foo())
print(x.foo())
Generalization of the answer by Pavel:
class LazyClass(object):
    def __init__(self, myclass, *args, **kwargs):
        self.instance = None
        self.myclass = myclass
        self.args = args
        self.kwargs = kwargs
    def __getattr__(self, k):
        if self.instance is None:
            self.instance = self.myclass(*self.args, **self.kwargs)
        return getattr(self.instance, k)

class A(object):
    def __init__(self, name):
        self.name = name
        print("Created")
    def hello(self):
        print("Hello " + self.name)

import unittest

class TestLazyClass(unittest.TestCase):
    def setUp(self):
        self.a = LazyClass(A, 'Daniel')
    def test_it(self):
        self.a.hello()
        self.a.hello()
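To run the test module directly (a standard unittest entry point):

if __name__ == '__main__':
    unittest.main()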
