I have a class that extends a base class. Upon instantiation, I want to check whether the subclass has overridden one of the methods defined by its base, but I'm not sure of the best way. hasattr(self, '[method]') returns True even when the method is only inherited from the base rather than implemented by the child, so I'm trying to tell the difference.
Here is an example:
class Base:
    def __init__(self):
        pass

    def fail(self):
        pass

# Now create the subclass w/o .fail
class Task(Base):
    def __init__(self):
        print(hasattr(self, 'fail'))  # <-- prints True
When Task() is instantiated it prints True because Task inherits .fail from Base. But in this case I want to know that Task does not implement .fail, so I want a False returned somehow. It seems like I'm looking for something like isimplemented(self, 'fail'). What am I missing?
I'm not sure I understand correctly, but it sounds like you might be looking for Abstract Base Classes. (Documentation here, tutorial here.) If you specify an abstractmethod in a base class that inherits from abc.ABC, then attempting to instantiate a subclass will fail unless that subclass overrides the abstractmethod.
from abc import ABC, abstractmethod

class Base(ABC):
    @abstractmethod
    def fail(self):
        pass

class Task(Base):
    pass

class Task2(Base):
    def fail(self):
        pass

# this raises an exception:
# the `fail` method has not been overridden in the subclass
t1 = Task()

# this succeeds:
# the `fail` method has been overridden in the subclass
t2 = Task2()
If you want the check to happen at class definition time rather than at instance instantiation time, another option is to write an __init_subclass__ method in your base class, which is called every time you subclass your base class or subclass a class inheriting from your base class. (You don't have to raise an exception in __init_subclass__; you could just add a fail_overridden boolean attribute to the class, or do anything you like really. A sketch of that variant follows the example below.)
class Base:
    def fail(self):
        pass

    def __init_subclass__(cls, **kwargs):
        if cls.fail == Base.fail:
            raise TypeError(
                'Subclasses of `Base` must override the `fail` method'
            )
        super().__init_subclass__(**kwargs)

# this class definition raises an exception
# because `fail` has not been overridden
class Task(Base):
    pass

# this class definition works fine
class Task2(Base):
    def fail(self):
        pass
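For completeness, here is a minimal sketch of the boolean-attribute variant mentioned above (the attribute name fail_overridden is just an illustrative choice):

class Base:
    def fail(self):
        pass

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Record on each subclass whether `fail` was overridden below `Base`
        cls.fail_overridden = cls.fail is not Base.fail

class Task(Base):
    pass

class Task2(Base):
    def fail(self):
        pass

print(Task.fail_overridden)   # False
print(Task2.fail_overridden)  # True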
And if you just want each instance to tell you whether fail was overridden in their subclass, you can do this:
class Base:
    def __init__(self):
        print(type(self).fail != Base.fail)

    def fail(self):
        pass

class Task(Base):
    def __init__(self):
        super().__init__()

class Task2(Base):
    def __init__(self):
        super().__init__()

    def fail(self):
        pass

t1 = Task()   # prints "False" (fail was not overridden)
t2 = Task2()  # prints "True" (fail was overridden)
IIUC, you can check super().fail == self.fail
class Base:
    def __init__(self):
        pass

    def fail(self):
        pass

class Task(Base):
    def __init__(self):
        print(super().fail == self.fail)

class Task2(Base):
    def __init__(self):
        print(super().fail == self.fail)

    def fail(self):
        # Override
        pass
Output:
t1 = Task()
# True
t2 = Task2()
# False
Not sure if I understand correctly, but you could check whether the fail method appears in the class's own namespace rather than only being inherited from the base class.
So you could try:
class Base:
    def __init__(self):
        print(self.__dir__())

    def fail(self):
        pass

class Task(Base):
    def __init__(self):
        print('fail' not in vars(Task))

class Task2(Base):
    def __init__(self):
        print('fail' not in vars(Task2))

    def fail(self):
        # Override
        pass
t1 = Task()
t2 = Task2()
Output:
True
False
Or use __dict__:
...
class Task(Base):
    def __init__(self):
        print('fail' not in Task.__dict__)

class Task2(Base):
    def __init__(self):
        print('fail' not in Task2.__dict__)

    def fail(self):
        # Override
        pass
...
Imagine a parent class which has a mangled attribute, and a child class:
class Foo:
    def __init__(self):
        self.__is_init = False

    async def init(self):
        # Some custom logic here, not important
        self.__is_init = True

class Bar(Foo):
    ...

# Create a class instance.
bar = Bar()

# How can I access `__is_init` of the parent class from the child instance?
How can I get a __is_init value from a parent (Foo) class?
Obviously, I can use bar._Foo__is_init in this example, but the problem is that the class name is dynamic and I need a general-purpose solution that will work with any passed class name.
The solution I see now is iterating over parent classes, and building a mangled attribute name dynamically:
from contextlib import suppress

class MangledAttributeError(Exception):
    ...

def getattr_mangled(object_: object, name: str):
    for cls_ in getattr(object_, "__mro__", None) or object_.__class__.__mro__:
        with suppress(AttributeError):
            return getattr(object_, f"_{cls_.__name__}{name}")
    raise MangledAttributeError(f"{type(object_).__name__} object has no attribute '{name}'")
Checking that this works:
class Foo:
    def __init__(self):
        self.__is_init = False

    async def init(self):
        self.__is_init = True

class Bar(Foo):
    def __init__(self):
        super().__init__()

bar = Bar()
is_init = getattr_mangled(bar, "__is_init")
print(f"is_init: {is_init}")  # Will print `False`, which is the correct value in this example
class Foo:
    def __init__(self):
        self.__is_init = False

    async def init(self):
        self.__is_init = True

class Bar(Foo):
    def getattr_mangled(self, attr: str):
        for i in self.__dict__.keys():
            if attr in i:
                return getattr(self, i)
                # return self.__dict__[i]  # or like this

bar = Bar()
print(bar.getattr_mangled('__is_init'))  # False
If Bar needs its own __init__, it should of course call Foo's __init__ as well, via super().__init__().
Once Foo's __init__ has run, the instance namespace already holds the attribute under the name we need, in its mangled form (_ParentClassName__attrname).
So we can just get it from the instance namespace without even knowing what the parent class name is.
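For example, with the Foo and Bar classes above (a quick sketch that only restates what the code already does):

bar = Bar()
print(vars(bar))  # {'_Foo__is_init': False}: the attribute is already stored under its mangled name
print(bar.getattr_mangled('__is_init'))  # False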
I need some help implementing/understanding how decorators written as classes work in Python. Most examples I've found are either decorating a class but implemented as a function, or implemented as a class but decorating a function. My goal is to create decorators implemented as classes that decorate classes.
To be more specific, I want to create a @Logger decorator and use it on some of my classes. What this decorator would do is simply inject a self.logger attribute into the class, so that every time I decorate a class with @Logger I'll be able to call self.logger.debug() in its methods.
Some initial questions:
What does the decorator's __init__ receive as parameters? I assumed it would receive only the decorated class and any decorator parameters, and that's actually what happens in most cases, but please take a look at the output below for DOMElementFeatureExtractor. Why did it receive all those parameters?
What about the __call__ method? What will it receive?
How can I provide a parameter to the decorator (@Logger(x='y'))? Will it be passed to the __init__ method?
Should I really be returning an instance of the class in the __call__ method? (only way I could make it work)
What about chaining decorators? How would that work if the previous decorator already returned an instance of the class? What should I fix in the example below in order to be able to stack @Logger and @Counter on a class?
Please take a look at this example code. I've created some dummy examples, but in the end you can see some code from my real project.
You can find the output at the end.
Any help understanding Python class decorators implemented as classes would be much appreciated.
Thank you
from abc import ABC, abstractmethod

class ConsoleLogger:
    def __init__(self):
        pass

    def info(self, message):
        print(f'INFO {message}')

    def warning(self, message):
        print(f'WARNING {message}')

    def error(self, message):
        print(f'ERROR {message}')

    def debug(self, message):
        print(f'DEBUG {message}')

class Logger(object):
    """ Logger decorator, adds a 'logger' attribute to the class """
    def __init__(self, cls, *args, **kwargs):
        print(cls, *args, **kwargs)
        self.cls = cls

    def __call__(self, *args, **kwargs):
        print(self.cls.__name__)
        logger = ConsoleLogger()
        setattr(self.cls, 'logger', logger)
        return self.cls(*args, **kwargs)
class Counter(object):
    """ Counter decorator, counts how many times a class has been instantiated """
    count = 0

    def __init__(self, cls, *args, **kwargs):
        self.cls = cls

    def __call__(self, *args, **kwargs):
        Counter.count += 1  # increment the class-level counter
        print(f'Class {self.cls} has been initialized {Counter.count} times')
        return self.cls(*args, **kwargs)
@Logger
class A:
    """ Simple class, no inheritance, no arguments in the constructor """
    def __init__(self):
        self.logger.info('Class A __init__()')

class B:
    """ Parent class for B1 """
    def __init__(self):
        pass

@Logger
class B1(B):
    """ Child class, still no arguments in the constructor """
    def __init__(self):
        super().__init__()
        self.logger.info('Class B1 __init__()')

class C(ABC):
    """ Abstract class """
    def __init__(self):
        super().__init__()

    @abstractmethod
    def do_something(self):
        pass

@Logger
class C1(C):
    """ Concrete class, implements C """
    def __init__(self):
        self.logger.info('Class C1 __init__()')

    def do_something(self):
        self.logger.info('something')

@Logger
class D:
    """ Class receives parameter on instantiation """
    def __init__(self, color):
        self.color = color
        self.logger.info('Class D __init__()')
        self.logger.debug(f'color = {color}')

class AbstractGenerator(ABC):
    def __init__(self):
        super().__init__()
        self.items = None
        self.next_item = None

    @abstractmethod
    def __iter__(self):
        pass

    def __next__(self):
        pass

    def __len__(self):
        pass

    def __getitem__(self, key):
        pass

class AbstractDOMElementExtractor(AbstractGenerator):
    def __init__(self, parameters, content):
        super().__init__()
        self.parameters = parameters
        self.content = content

@Logger
class DOMElementExtractor(AbstractDOMElementExtractor):
    def __init__(self, parameters, content):
        super().__init__(parameters, content)

    def __iter__(self):
        self.logger.debug('__iter__')

    def __next__(self):
        self.logger.debug('__next__')

    def __len__(self):
        self.logger.debug('__len__')

    def __getitem__(self, key):
        self.logger.debug('__getitem__')

class DOMElementFeatureExtractor(DOMElementExtractor):
    def __init__(self, parameters, content):
        super().__init__(parameters, content)

class DocumentProcessor:
    def __init__(self):
        self.dom_element_extractor = DOMElementExtractor(parameters={}, content='')

    def process(self):
        self.dom_element_extractor.__iter__()

a = A()
b1 = B1()
c1 = C1()
c1.do_something()
d = D(color='Blue')

document_processor = DocumentProcessor()
document_processor.process()
Output:
<class '__main__.A'>
<class '__main__.B1'>
<class '__main__.C1'>
<class '__main__.D'>
<class '__main__.DOMElementExtractor'>
DOMElementFeatureExtractor (<__main__.Logger object at 0x7fae27c26400>,) {'__module__': '__main__', '__qualname__': 'DOMElementFeatureExtractor', '__init__': <function DOMElementFeatureExtractor.__init__ at 0x7fae27c25840>, '__classcell__': <cell at 0x7fae27cf09d8: empty>}
A
INFO Class A __init__()
B1
INFO Class B1 __init__()
C1
INFO Class C1 __init__()
INFO something
D
INFO Class D __init__()
DEBUG color = Blue
DOMElementExtractor
DEBUG __iter__
Won't be a full answer, but I think it's helpful to review the basics of a decorator. This is what decorating looks like:
@Logger
class A:
    # A's code
By definition, it's equivalent to doing this:
class A:
    # A's code

A = Logger(A)  # Logger has to be callable because...it's called
Sources often say that decorators "modify", but that's really just the intended use. Technically, all you need is for A to have a definition (so a function, method, or class) and for Logger to be callable. If Logger returned "Hello, World", that's what A would become.
Okay, let's pretend we didn't decorate A for a bit and think about what it would take for Logger(A) to be "modifying." Well, A is a class, and you call a class to create instances: A(*args). Therefore, Logger(A)(*args) must also produce instances of A. But Logger(A) isn't the class A, it's an instance of Logger. Luckily, you can make instances callable by defining the __call__ method in their class. Logger's __call__ method calls the class stored in its cls attribute and returns the instance.
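Here is a minimal sketch of that mechanism, using a hypothetical no-argument decorator rather than your full Logger:

class Passthrough:
    def __init__(self, cls):
        # __init__ receives the decorated class itself
        self.cls = cls

    def __call__(self, *args, **kwargs):
        # Calling the decorated name actually calls this instance;
        # delegate to the real class so we get back genuine A instances
        return self.cls(*args, **kwargs)

@Passthrough
class A:
    def __init__(self, value):
        self.value = value

a = A(42)       # really Passthrough.__call__(42), which returns A(42)
print(type(a))  # <class '__main__.A'>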
As for parameters in a decorator, it also helps to think about what it's equivalent to. You're interested in doing this:
@Logger(x='y')
class A:
    # A code
So it's equivalent to this:
class A:
    # A code

A = Logger(x='y')(A)
Note that Logger itself is not taking A as an argument. It's taking 'y' as an argument and returning another callable that takes A as an argument. So if Logger is a class, Logger(x = 'y') would be a Logger instance. Instances of a class can also serve as decorators if the class has a __call__ method!
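To make that concrete, here is a minimal sketch of a parameterised class-based decorator, reusing the ConsoleLogger from your example. Note this is not your exact Logger: here __init__ receives only the decorator arguments, __call__ receives the class, and the class itself is returned, which also keeps stacking several such decorators straightforward.

class Logger:
    def __init__(self, **options):
        # Only the decorator arguments arrive here, not the decorated class
        self.options = options

    def __call__(self, cls):
        # The decorated class arrives here; attach a logger and hand the class back
        cls.logger = ConsoleLogger()
        return cls

@Logger(x='y')
class A:
    def greet(self):
        self.logger.info('hello from A')

A().greet()  # INFO hello from A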
I want to do something like:
class A(Resource):
    @dec(from_file=A.docpath)
    def get(self):
        pass

class B(A):
    docpath = './docs/doc_for_get_b.json'

class C(A):
    docpath = './docs/doc_for_get_c.json'

def dec(*args, **kwargs):
    def inner(f):
        docpath = kwargs.get('from_file')
        f.__kwargs__ = open(docpath, 'r').read()
        return f
    return inner
The functions that will be called are B.get and C.get, never A.get.
How can I access the custom attribute docpath defined in class B or class C and pass it to the decorator of the get function in class A ?
Current solution: Put the decorator on each derived class ...
class A(Resource):
    def _get(self):
        pass

class B(A):
    @dec(from_file='./docs/doc_for_get_b.json')
    def get(self):
        return self._get()

class C(A):
    @dec(from_file='./docs/doc_for_get_c.json')
    def get(self):
        return self._get()
This works but it's pretty ugly compared to the one-line declaration of the classes in the previous code.
Accessing a class's attributes inside the decorator is easy:
def decorator(function):
    def inner(self):
        self_type = type(self)
        # self_type is now the class of the instance of the method that this
        # decorator is wrapping
        print('The class attribute docpath is %r' % self_type.docpath)
        # need to pass self through because at the point function is
        # decorated it has not been bound to an instance, and so it is just a
        # normal function which takes self as the first argument.
        function(self)
    return inner

class A:
    docpath = "A's docpath"

    @decorator
    def a_method(self):
        print('a_method')

class B(A):
    docpath = "B's docpath"

a = A()
a.a_method()
b = B()
b.a_method()
In general I've found using multiple levels of decorators, i.e. decorator factory functions that create decorators such as you've used and such as:
def decorator_factory(**kwargs):
    def decorator_function(function):
        def wrapper(self):
            print('Wrapping function %s with kwargs %s' % (function.__name__, kwargs))
            function(self)
        return wrapper
    return decorator_function

class A:
    @decorator_factory(a=2, b=3)
    def do_something(self):
        print('do_something')

a = A()
a.do_something()
a difficult thing to get right and not easy to comprehend when reading code, so I would err towards using class attributes and generic superclass methods in favour of lots of decorators.
So in your case, don't pass the file path in as an argument to your decorator factory, but set it as a class attribute on your derived classes, and then write a generic method in your superclass that reads the class attribute from the instance's class.
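Here is a minimal sketch of that approach; the _load_doc helper name is just illustrative, and the Resource base is the one from your example:

class A(Resource):
    docpath = None  # each derived class sets its own path

    def get(self):
        return self._load_doc()

    def _load_doc(self):
        # Generic superclass method: type(self) is B or C at runtime,
        # so the derived class's docpath is picked up automatically
        with open(type(self).docpath) as f:
            return f.read()

class B(A):
    docpath = './docs/doc_for_get_b.json'

class C(A):
    docpath = './docs/doc_for_get_c.json'

# B().get() reads doc_for_get_b.json; C().get() reads doc_for_get_c.json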
I wrote a Python module, with several classes that inherit from a single class called MasterBlock.
I want to import this module in a script, create several instances of these classes, and then get a list of all the existing instances of all the children of this MasterBlock class. I found some solutions using vars()['Blocks.MasterBlock'].__subclasses__(), but as the instances I have are children of children of MasterBlock, it doesn't work.
Here is some example code:
Module:
class MasterBlock:
    def main(self):
        pass

class RandomA(MasterBlock):
    def __init__(self):
        pass
    # inherits the main function

class AnotherRandom(MasterBlock):
    def __init__(self):
        pass
    # inherits the main function
Script:
import module
a=module.RandomA()
b=module.AnotherRandom()
c=module.AnotherRandom()
# here I need to get list_of_instances=[a,b,c]
The ultimate goal is to be able to do:
for instance in list_of_instances:
    instance.main()
If you add a __new__() method as shown below to your base class which keeps track of all instances created in a class variable, you could make the process more-or-less automatic and not have to remember to call something in the __init__() of each subclass.
class MasterBlock(object):
    instances = []

    def __new__(cls, *args, **kwargs):
        instance = super().__new__(cls)
        instance.instances.append(instance)
        return instance

    def main(self):
        print('in main of', self.__class__.__name__)  # for testing purposes

class RandomA(MasterBlock):
    def __init__(self):
        pass
    # inherit the main function

class AnotherRandom(RandomA):  # works for sub-subclasses, too
    def __init__(self):
        pass
    # inherit the main function

a = RandomA()
b = AnotherRandom()
c = AnotherRandom()

for instance in MasterBlock.instances:
    instance.main()
Output:
in main of RandomA
in main of AnotherRandom
in main of AnotherRandom
What about adding a class variable that contains all the instances of MasterBlock? You can record them with:
class MasterBlock(object):
    all_instances = []  # All instances of MasterBlock

    def __init__(self, …):
        …
        self.all_instances.append(self)  # Not added if an exception is raised before
You get all the instances of MasterBlock with MasterBlock.all_instances (or instance.all_instances).
This works as long as every subclass calls the __init__ of the master class (either implicitly through inheritance or explicitly through the usual super() call).
Here's a way of doing that using a class variable:
class MasterBlock(object):
    instances = []

    def __init__(self):
        self.instances.append(self)

    def main(self):
        print("I am", self)

class RandomA(MasterBlock):
    def __init__(self):
        super(RandomA, self).__init__()
        # other init...

class AnotherRandom(MasterBlock):
    def __init__(self):
        super(AnotherRandom, self).__init__()
        # other init...

a = RandomA()
b = AnotherRandom()
c = AnotherRandom()

# here I need to get list_of_instances=[a,b,c]
for instance in MasterBlock.instances:
    instance.main()
(You can make it simpler if you don't need __init__ in the subclasses; see the sketch after the output below.)
output:
I am <__main__.RandomA object at 0x7faa46683610>
I am <__main__.AnotherRandom object at 0x7faa46683650>
I am <__main__.AnotherRandom object at 0x7faa46683690>
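For reference, a sketch of that simpler version, where the subclasses define no __init__ of their own and simply inherit MasterBlock's registering __init__:

class MasterBlock(object):
    instances = []

    def __init__(self):
        self.instances.append(self)

    def main(self):
        print("I am", self)

class RandomA(MasterBlock):
    pass

class AnotherRandom(MasterBlock):
    pass

a = RandomA()
b = AnotherRandom()
c = AnotherRandom()

for instance in MasterBlock.instances:
    instance.main()  # prints one "I am ..." line per instance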
Is it possible to leave a parent class unspecified until an instance is created?
e.g. something like this:
class SomeParentClass:
    # something

class Child(unspecifiedParentClass):
    # something

instance = Child(SomeParentClass)
This obviously does not work. But is it possible to do this somehow?
You can change the class of an instance in the class' __init__() method:
class Child(object):
    def __init__(self, baseclass):
        self.__class__ = type(self.__class__.__name__,
                              (baseclass, object),
                              dict(self.__class__.__dict__))
        super(self.__class__, self).__init__()
        print('initializing Child instance')
        # continue with Child class' initialization...

class SomeParentClass(object):
    def __init__(self):
        print('initializing SomeParentClass instance')

    def hello(self):
        print('in SomeParentClass.hello()')

c = Child(SomeParentClass)
c.hello()
Output:
initializing SomeParentClass instance
initializing Child instance
in SomeParentClass.hello()
Have you tried something like this?
class SomeParentClass(object):
    # ...
    pass

def Child(parent):
    class Child(parent):
        # ...
        pass
    return Child()

instance = Child(SomeParentClass)
In Python 2.x, also be sure to include object as the parent class's superclass, to use new-style classes.
You can dynamically change base classes at runtime. Such as:
class SomeParentClass:
    # something

class Child():
    # something

def change_base_class(base_class):
    return type('Child', (base_class, object), dict(Child.__dict__))()

instance = change_base_class(SomeParentClass)
For example:
class Base_1:
    def hello(self):
        print('hello_1')

class Base_2:
    def hello(self):
        print('hello_2')

class Child:
    pass

def add_base(base):
    return type('Child', (base, object), dict(Child.__dict__))()

# if you want to change the Child class itself, just:
def change_base(base):
    global Child
    Child = type('Child', (base, object), dict(Child.__dict__))

def main():
    c1 = add_base(Base_1)
    c2 = add_base(Base_2)
    c1.hello()
    c2.hello()

main()
Result:
hello_1
hello_2
Works well in both python 2 and 3.
For more information, see the related question How to dynamically change base class of instances at runtime?