I am trying to create a mixin class that has its own properties, but the mixin has no __init__ in which to initialize the "hidden" variable behind the property.
class Software:
    __metaclass__ = ABCMeta

    @property
    def volumes(self):
        return self._volumes

    @volumes.setter
    def volumes(self, value):
        pass

class Base(object):
    def __init__(self):
        self._volumes = None

class SoftwareUser(Base, Software):
    def __init__(self):
        super(Base, self).__init__()
So the above is the best that I have come up with to solve this, but the reality is that _volumes doesn't really belong in the base. I could add an __init__ to the Software class, but then the super call won't work on both mixins.
The second issue is that I will need multiple mixins depending on the incoming call. They will always need the base, but the mixins will change, so I don't really want variables from mixins that aren't mixed in for that call.
Is there a way I can have the mixin add its variables to the class if it is mixed in, perhaps by dynamically calling the __init__ of the mixin class?
Any questions let me know.
Thanks
Yes, that's wildly overcomplicated. A class (including mixins) should only be responsible for calling the next implementation in the MRO, not marshalling all of them. Try:
class Software:
    @property
    def volumes(self):
        return self._volumes

    @volumes.setter
    def volumes(self, value):
        pass

    def __init__(self):
        self._volumes = None
        super().__init__()  # mixin calls super too

class Base(object):
    def __init__(self):
        other_vars = None

class SoftwareUser(Software, Base):  # note order
    def __init__(self):
        super().__init__()  # all you need here
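As a quick sanity check (purely illustrative, assuming the classes are defined exactly as above):

user = SoftwareUser()
print(user.volumes)   # None -- set by Software.__init__ via the MRO
print([c.__name__ for c in SoftwareUser.__mro__])
# ['SoftwareUser', 'Software', 'Base', 'object']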
OK, so here is what I came up with. I am open to other answers if I have made this way overcomplicated.
class Software:
    @property
    def volumes(self):
        return self._volumes

    @volumes.setter
    def volumes(self, value):
        pass

    def __init__(self):
        self._volumes = None

class Base(object):
    def __init__(self):
        other_vars = None

class SoftwareUser(Base, Software):
    def _bases_init(self, *args, **kwargs):
        for base in type(self).__bases__:
            base.__init__(self, *args, **kwargs)

    def __init__(self, *args, **kwargs):
        self._bases_init(*args, **kwargs)
Need some help to implement/understand how decorators written as classes work in Python. Most examples I've found either decorate a class but are implemented as a function, or are implemented as a class but decorate a function. My goal is to create decorators implemented as classes and use them to decorate classes.
To be more specific, I want to create a @Logger decorator and use it in some of my classes. What this decorator would do is simply inject a self.logger attribute into the class, so every time I decorate a class with @Logger I'll be able to call self.logger.debug() in its methods.
Some initial questions:
What does the decorator's __init__ receive as parameters? I thought it would receive only the decorated class and any decorator parameters, and that's actually what happens in most cases, but please take a look at the output below for the DOMElementFeatureExtractor. Why did it receive all those parameters?
What about the __call__ method? What will it receive?
How can I provide a parameter for the decorator (@Logger(x='y'))? Will it be passed to the __init__ method?
Should I really be returning an instance of the class in the __call__ method? (only way I could make it work)
What about chaining decorators? How would that work if the previous decorator already returned an instance of the class? What should I fix in the example below in order to be able to stack @Logger and @Counter on a class?
Please take a look at this example code. I've created some dummy examples, but in the end you can see some code from my real project.
You can find the output at the end.
Any help to understand Python classes decorators implemented as a class would be much appreciated.
Thank you
from abc import ABC, abstractmethod

class ConsoleLogger:
    def __init__(self):
        pass

    def info(self, message):
        print(f'INFO {message}')

    def warning(self, message):
        print(f'WARNING {message}')

    def error(self, message):
        print(f'ERROR {message}')

    def debug(self, message):
        print(f'DEBUG {message}')

class Logger(object):
    """ Logger decorator, adds a 'logger' attribute to the class """
    def __init__(self, cls, *args, **kwargs):
        print(cls, *args, **kwargs)
        self.cls = cls

    def __call__(self, *args, **kwargs):
        print(self.cls.__name__)
        logger = ConsoleLogger()
        setattr(self.cls, 'logger', logger)
        return self.cls(*args, **kwargs)

class Counter(object):
    """ Counter decorator, counts how many times a class has been instantiated """
    count = 0

    def __init__(self, cls, *args, **kwargs):
        self.cls = cls

    def __call__(self, *args, **kwargs):
        Counter.count += 1
        print(f'Class {self.cls} has been initialized {Counter.count} times')
        return self.cls(*args, **kwargs)

@Logger
class A:
    """ Simple class, no inheritance, no arguments in the constructor """
    def __init__(self):
        self.logger.info('Class A __init__()')

class B:
    """ Parent class for B1 """
    def __init__(self):
        pass

@Logger
class B1(B):
    """ Child class, still no arguments in the constructor """
    def __init__(self):
        super().__init__()
        self.logger.info('Class B1 __init__()')

class C(ABC):
    """ Abstract class """
    def __init__(self):
        super().__init__()

    @abstractmethod
    def do_something(self):
        pass

@Logger
class C1(C):
    """ Concrete class, implements C """
    def __init__(self):
        self.logger.info('Class C1 __init__()')

    def do_something(self):
        self.logger.info('something')

@Logger
class D:
    """ Class receives a parameter on instantiation """
    def __init__(self, color):
        self.color = color
        self.logger.info('Class D __init__()')
        self.logger.debug(f'color = {color}')

class AbstractGenerator(ABC):
    def __init__(self):
        super().__init__()
        self.items = None
        self.next_item = None

    @abstractmethod
    def __iter__(self):
        pass

    def __next__(self):
        pass

    def __len__(self):
        pass

    def __getitem__(self, key):
        pass

class AbstractDOMElementExtractor(AbstractGenerator):
    def __init__(self, parameters, content):
        super().__init__()
        self.parameters = parameters
        self.content = content

@Logger
class DOMElementExtractor(AbstractDOMElementExtractor):
    def __init__(self, parameters, content):
        super().__init__(parameters, content)

    def __iter__(self):
        self.logger.debug('__iter__')

    def __next__(self):
        self.logger.debug('__next__')

    def __len__(self):
        self.logger.debug('__len__')

    def __getitem__(self, key):
        self.logger.debug('__getitem__')

class DOMElementFeatureExtractor(DOMElementExtractor):
    def __init__(self, parameters, content):
        super().__init__(parameters, content)

class DocumentProcessor:
    def __init__(self):
        self.dom_element_extractor = DOMElementExtractor(parameters={}, content='')

    def process(self):
        self.dom_element_extractor.__iter__()

a = A()
b1 = B1()
c1 = C1()
c1.do_something()
d = D(color='Blue')

document_processor = DocumentProcessor()
document_processor.process()
Output:
<class '__main__.A'>
<class '__main__.B1'>
<class '__main__.C1'>
<class '__main__.D'>
<class '__main__.DOMElementExtractor'>
DOMElementFeatureExtractor (<__main__.Logger object at 0x7fae27c26400>,) {'__module__': '__main__', '__qualname__': 'DOMElementFeatureExtractor', '__init__': <function DOMElementFeatureExtractor.__init__ at 0x7fae27c25840>, '__classcell__': <cell at 0x7fae27cf09d8: empty>}
A
INFO Class A __init__()
B1
INFO Class B1 __init__()
C1
INFO Class C1 __init__()
INFO something
D
INFO Class D __init__()
DEBUG color = Blue
DOMElementExtractor
DEBUG __iter__
Won't be a full answer, but I think it's helpful to review the basics of a decorator. This is what decorating looks like:
@Logger
class A:
    # A's code
By definition, it's equivalent to doing this:
class A:
    # A's code

A = Logger(A)  # Logger has to be callable because...it's called
Sources often say that decorators "modify", but that's really just the intended use. Technically, all you need is A to have a definition (so a function, method, or class) and Logger to be callable. If Logger returned "Hello, World", that's what A becomes.
Okay, let's pretend we didn't decorate A for a bit and think about what it would take for Logger(A) to be "modifying." Well, A is a class, and you call a class to create instances: A(*args). Therefore, Logger(A)(*args) must also be an instance of A. But Logger(A) isn't the class A, it's an instance of Logger. Luckily, you can make instances callable by defining the __call__ method in its class. Logger's __call__ method calls the class stored in its cls attribute and returns the instance.
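As a bare-bones sketch of that idea (using a hypothetical Identity decorator rather than the Logger from the question):

class Identity:
    def __init__(self, cls):
        self.cls = cls                      # decoration: Identity(A) stores A

    def __call__(self, *args, **kwargs):
        return self.cls(*args, **kwargs)    # "calling A" builds a real A instance

@Identity
class A:
    def __init__(self, x):
        self.x = x

a = A(42)                 # actually Identity.__call__(42)
print(type(a).__name__)   # A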
As for parameters in a decorator, it also helps to think about what it's equivalent to. You're interested in doing this:
@Logger(x='y')
class A:
    # A code
So it's equivalent to this:
class A:
    # A code

A = Logger(x='y')(A)
Note that Logger itself is not taking A as an argument. It's taking 'y' as an argument and returning another callable that takes A as an argument. So if Logger is a class, Logger(x = 'y') would be a Logger instance. Instances of a class can also serve as decorators if the class has a __call__ method!
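To make that concrete, here is a rough sketch of a parameterized class decorator (the name LoggerWithOptions and its behavior are just for illustration; it reuses the ConsoleLogger from the question):

class LoggerWithOptions:
    def __init__(self, **options):
        self.options = options              # __init__ only sees the decorator arguments

    def __call__(self, cls):
        cls.logger = ConsoleLogger()        # __call__ receives the class being decorated
        cls.logger_options = self.options
        return cls                          # return the class itself

@LoggerWithOptions(x='y')
class A:
    def __init__(self):
        self.logger.debug(f'options = {self.logger_options}')

A()   # DEBUG options = {'x': 'y'}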
I have something like this:
class SuperClass(object):
    def __init__(self):
        # initialization stuff
        ...

    def always_do_this_last(self):
        # cleanup stuff
        ...

class SubClass(SuperClass):
    def __init__(self):
        super().__init__()
        # intermediate stuff
        self.always_do_this_last()
Is it possible to automatically call that last line? Every subclass of SuperClass needs to perform the cleanup.
Instead of overriding __init__, define a method that SuperClass.__init__ will call.
class SuperClass(object):
    def __init__(self):
        # do some stuff
        self.child_init()
        self.cleanup()

    def cleanup(self):
        ...

    def child_init(self):
        pass

class SubClass(SuperClass):
    def child_init(self):
        ...
You can define SuperClass.__init_subclass__ to ensure child_init is overridden, or use the abc module to make SuperClass.child_init an abstract method.
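A minimal sketch of the __init_subclass__ option, with the check added to the SuperClass above:

class SuperClass(object):
    def __init__(self):
        self.child_init()
        self.cleanup()

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # reject subclasses that forget to override child_init,
        # checked at class-definition time
        if cls.child_init is SuperClass.child_init:
            raise TypeError(f'{cls.__name__} must override child_init()')

    def cleanup(self):
        ...

    def child_init(self):
        pass

class GoodSub(SuperClass):
    def child_init(self):
        ...

# a subclass that does not override child_init would raise TypeError
# as soon as its class statement runs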
One option could be to use a method that the subclasses could override without overriding __init__(). Maybe like this:
class SuperClass:
    def __init__(self):
        # initialization stuff
        self.setup_subclass()
        self.always_do_this_last()

    def setup_subclass(self):
        pass

    def always_do_this_last(self):
        # cleanup stuff
        ...

class SubClass(SuperClass):
    def setup_subclass(self):
        # intermediate stuff
        ...
Would that work for you?
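As a quick usage sketch (print calls added here only to show the ordering):

class LoudSubClass(SuperClass):
    def setup_subclass(self):
        print('intermediate stuff')

    def always_do_this_last(self):
        print('cleanup stuff')

LoudSubClass()   # prints 'intermediate stuff' then 'cleanup stuff'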
You have 2 options:
Use a different method as your initializer and call always_do_this_last afterwards
class SuperClass(object):
    def __init__(self):
        self._init()                 # initialize
        self.always_do_this_last()   # clean up

    def _init(self):
        pass  # initialization stuff

    def always_do_this_last(self):
        pass  # cleanup stuff

class SubClass(SuperClass):
    def _init(self):
        super()._init()
        # intermediate stuff
Use a metaclass
class CleanupMeta(type):
    def __call__(cls, *args, **kwargs):
        obj = super().__call__(*args, **kwargs)
        obj.always_do_this_last()
        return obj

class SuperClass(metaclass=CleanupMeta):
    def __init__(self):
        pass  # initialization stuff

    def always_do_this_last(self):
        pass  # cleanup stuff

class SubClass(SuperClass):
    def __init__(self):
        super().__init__()
        # intermediate stuff
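And a rough usage sketch for the metaclass option (again, prints added only to show what runs when):

class LoudSubClass(SuperClass):
    def __init__(self):
        super().__init__()
        print('initializing')

    def always_do_this_last(self):
        print('cleaning up')

LoudSubClass()   # prints 'initializing' then 'cleaning up'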
The other answers here are more than sufficient. I will add that you might want to have a look at the abstract base class if you are implementing a class that requires certain member functions to be implemented.
In the example below the parent requires the initialize and cleanup methods to be defined in each child (try removing one of them to verify an error is raised).
import abc

class SuperClass(abc.ABC):
    def __init__(self):
        print("Instantiating Class")
        self.initialize()
        self.cleanup()

    @abc.abstractmethod
    def initialize(self):
        pass

    @abc.abstractmethod
    def cleanup(self):
        pass

class SubClass(SuperClass):
    def __init__(self):
        super(SubClass, self).__init__()

    def initialize(self):
        print("initializing...")

    def cleanup(self):
        print("... cleanup.")

a = SubClass()
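For reference, running the snippet above should print something like:

Instantiating Class
initializing...
... cleanup.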
I can't figure out the correct way to model this problem.
Here I give you a minimalistic version of my code:
# -*- coding: utf-8 -*-
from abc import ABCMeta, abstractmethod

class AreaCalculator():
    __metaclass__ = ABCMeta

    def __init__(self):
        pass

    @abstractmethod
    def getArea(self):
        pass

    def compute(self):
        self.getArea()

class PerimeterCalculator():
    __metaclass__ = ABCMeta

    def __init__(self):
        pass

    @abstractmethod
    def getPerimeter(self):
        pass

    def compute(self):
        self.getPerimeter()

class TriangleAreaCalculator(AreaCalculator):
    def __init__(self):
        AreaCalculator.__init__(self)

    def getArea(self):
        return area

class TrianglePerimeterCalculator(PerimeterCalculator):
    def __init__(self):
        PerimeterCalculator.__init__(self)

    def getPerimeter(self):
        return perimeter

a = TriangleAreaCalculator()
b = TrianglePerimeterCalculator()
Is there an elegant way to merge "TrianglePerimeterCalculator" and "TriangleAreaCalculator" classes into one, but keeping "PerimeterCalculator" and "AreaCalculator" separated?
[edit] As Kyle suggested in the comments, I can create a new class (let's call it "Triangle") that inherits from "PerimeterCalculator" and "AreaCalculator" at the same time, but what I want is to be able to tell a new instance of "Triangle" to behave as "PerimeterCalculator" or "AreaCalculator", but not both at the same time.
I think the "design pattern" you should use is multiple inheritance. Below is a modified version of your code demonstrating how do it (plus a few other changes to make it actually runnable and all classes new-style).
from abc import ABCMeta, abstractmethod

class AreaCalculator(object):
    __metaclass__ = ABCMeta

    def __init__(self):
        pass

    @abstractmethod
    def getArea(self):
        pass

    def compute(self):
        self.getArea()

class PerimeterCalculator(object):
    __metaclass__ = ABCMeta

    def __init__(self):
        pass

    @abstractmethod
    def getPerimeter(self):
        pass

    def compute(self):
        self.getPerimeter()

class TriangleAreaCalculator(AreaCalculator):
    def __init__(self):
        super(TriangleAreaCalculator, self).__init__()

    def getArea(self):
        print('TriangleAreaCalculator.getArea() called on instance of {}'.format(
            self.__class__.__name__))
        # return area
        return 13

class TrianglePerimeterCalculator(PerimeterCalculator):
    def __init__(self):
        super(TrianglePerimeterCalculator, self).__init__()

    def getPerimeter(self):
        print('TrianglePerimeterCalculator.getPerimeter() called on instance of {}'.format(
            self.__class__.__name__))
        # return perimeter
        return 42

class MergedCalculator(TriangleAreaCalculator, TrianglePerimeterCalculator):
    def __init__(self):
        super(MergedCalculator, self).__init__()

merged = MergedCalculator()
print('merged.getArea() -> {}'.format(merged.getArea()))
print('merged.getPerimeter() -> {}'.format(merged.getPerimeter()))
Output:
TriangleAreaCalculator.getArea() called on instance of MergedCalculator
merged.getArea() -> 13
TrianglePerimeterCalculator.getPerimeter() called on instance of MergedCalculator
merged.getPerimeter() -> 42
Here's another answer, following the editing and clarification of your question. It allows creation of a single Triangle instance that can behave like either an AreaCalculator or PerimeterCalculator, as needed.
This programming pattern is called "delegation" and is used where the responsibility for implementing a particular operation is handed off to a different object—in this case an internally held instance of some other class. A common way to do this in Python is by overriding the class's default __getattr__() method.
Since you've never responded to the comment under my other answer about exactly what it is that controls which behavior is used, I added a set_behavior() method to allow it to be specified explicitly.
from abc import ABCMeta, abstractmethod

class AreaCalculator:
    __metaclass__ = ABCMeta

    def __init__(self):
        pass

    @abstractmethod
    def getArea(self):
        pass

    def compute(self):
        return self.getArea()

class PerimeterCalculator:
    __metaclass__ = ABCMeta

    def __init__(self):
        pass

    @abstractmethod
    def getPerimeter(self):
        pass

    def compute(self):
        return self.getPerimeter()

class TriangleAreaCalculator(AreaCalculator):
    def __init__(self):
        super(TriangleAreaCalculator, self).__init__()

    def getArea(self):
        print('TriangleAreaCalculator.getArea() called')
        area = 13
        return area

class TrianglePerimeterCalculator(PerimeterCalculator):
    def __init__(self):
        super(TrianglePerimeterCalculator, self).__init__()

    def getPerimeter(self):
        print('TrianglePerimeterCalculator.getPerimeter() called')
        perimeter = 42
        return perimeter

class Triangle:
    def __init__(self):
        delegate_classes = TriangleAreaCalculator, TrianglePerimeterCalculator
        # Map delegate classes to instances of themselves.
        self._delegates = {delegate_class: delegate_class()
                           for delegate_class in delegate_classes}
        self.set_behavior(TriangleAreaCalculator)  # Set default delegate.

    def __getattr__(self, attrname):
        # Called only for attributes not defined by this class (or its bases).
        # Retrieve attribute from current behavior delegate class instance.
        return getattr(self._behavior, attrname)

    def set_behavior(self, delegate_class):
        try:
            self._behavior = self._delegates[delegate_class]
        except KeyError:
            raise TypeError("{} isn't a valid {} behavior delegate class"
                            .format(delegate_class, self.__class__.__name__))

if __name__ == '__main__':
    triangle = Triangle()
    # Uses instance's default behavior.
    print('triangle.compute() -> {}'.format(triangle.compute()))
    triangle.set_behavior(TrianglePerimeterCalculator)  # Change behavior.
    print('triangle.compute() -> {}'.format(triangle.compute()))
Output:
TriangleAreaCalculator.getArea() called
triangle.compute() -> 13
TrianglePerimeterCalculator.getPerimeter() called
triangle.compute() -> 42
I figured it out myself, with inspiration from the comments/answers of Kyle and martineau.
I can create a merged class "Triangle" as follows:
class Triangle():
    def __init__(self):
        pass

    def getTriangleArea(self):
        print('Triangle area')

    def getTrianglePerimeter(self):
        print('Triangle perimeter')
And then modify TriangleAreaCalculator and TrianglePerimeterCalculator as follows:
class TriangleAreaCalculator(AreaCalculator, Triangle):
    def __init__(self):
        Triangle.__init__(self)
        AreaCalculator.__init__(self)

    def getArea(self):
        super(TriangleAreaCalculator, self).getTriangleArea()

class TrianglePerimeterCalculator(PerimeterCalculator, Triangle):
    def __init__(self):
        Triangle.__init__(self)
        PerimeterCalculator.__init__(self)

    def getPerimeter(self):
        super(TrianglePerimeterCalculator, self).getTrianglePerimeter()
This way, I can create a new Triangle-like instance that behaves as "PerimeterCalculator" or "AreaCalculator" (but not both at the same time):
a = TriangleAreaCalculator()
b = TrianglePerimeterCalculator()
a.compute() # correctly prints "Triangle area"
b.compute() # correctly prints "Triangle perimeter"
I have some code that looks like this:
class Log(object):
    @property
    def log(self):
        return self.log

class ExampleClass2(ExampleClass, Log):

    class ExampleClass3(object):
        @property
        def log_value(self):
            self.log.info('Hi!')
However I'm getting an error,
'ExampleClass3' object has no attribute 'log'
I'm guessing I need to add an __init__() method to ExampleClass3, and I've tried using
super(ExampleClass2.ExampleClass3, self).__init__()
but I'm still having problems accessing log. Any suggestions?
I believe that to get your desired behavior, you need to pass in an instance of ExampleClass2 when you create an instance of ExampleClass3.
class OuterClass:
    def __init__(self, value):
        self.value = value

    class InnerClass:
        def __init__(self, instance):
            self.instance = instance

        def inner_print_value(self):
            print(self.instance.value)

    def outer_print_value(self):
        printer = OuterClass.InnerClass(self)
        printer.inner_print_value()

OuterClass('Hi').outer_print_value()  # 'Hi'
As noted in the comments, there is rarely a reason for this kind of structure. It would be easier to create InnerClass outside of the definition of OuterClass.
class OuterClass:
    def __init__(self, value):
        self.value = value

    def outer_print_value(self):
        printer = InnerClass(self)
        printer.inner_print_value()

class InnerClass:
    def __init__(self, instance):
        self.instance = instance

    def inner_print_value(self):
        print(self.instance.value)
It seems like you're expecting the value of self to be augmented when creating an inner-class, but this is not the case. To do this, you'd want to use inheritance, and that doesn't require nested classes either.
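For example, a plain mixin gets you the self.log attribute from the original question without any nesting. This is only a sketch using the standard logging module; it also caches the logger so the property doesn't refer back to itself the way the original one did:

import logging
logging.basicConfig(level=logging.INFO)

class Log(object):
    @property
    def log(self):
        # lazily create and cache a logger named after the concrete class
        if not hasattr(self, '_log'):
            self._log = logging.getLogger(type(self).__name__)
        return self._log

class ExampleClass3(Log):
    def log_value(self):
        self.log.info('Hi!')

ExampleClass3().log_value()   # INFO:ExampleClass3:Hi!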
I have python class trees, each made up of an abstract base class and many deriving concrete classes. I want all concrete classes to be accessible through a base-class method, and I do not want to specify anything during child-class creation.
This is what my imagined solution looks like:
class BaseClassA(object):
    # <some magic code around here>

    @classmethod
    def getConcreteClasses(cls):
        # <some magic related code here>
        ...

class ConcreteClassA1(BaseClassA):
    # no magic-related code here
    ...

class ConcreteClassA2(BaseClassA):
    # no magic-related code here
    ...
As much as possible, I'd prefer to write the "magic" once as a sort of design pattern. I want to be able to apply it to different class trees in different scenarios (i.e. add a similar tree with "BaseClassB" and its concrete classes).
Thanks Internet!
You can use metaclasses for that:
class AutoRegister(type):
    def __new__(mcs, name, bases, classdict):
        new_cls = type.__new__(mcs, name, bases, classdict)
        # print mcs, name, bases, classdict
        for b in bases:
            if hasattr(b, 'register_subclass'):
                b.register_subclass(new_cls)
        return new_cls

class AbstractClassA(object):
    __metaclass__ = AutoRegister
    _subclasses = []

    @classmethod
    def register_subclass(klass, cls):
        klass._subclasses.append(cls)

    @classmethod
    def get_concrete_classes(klass):
        return klass._subclasses

class ConcreteClassA1(AbstractClassA):
    pass

class ConcreteClassA2(AbstractClassA):
    pass

class ConcreteClassA3(ConcreteClassA2):
    pass

print AbstractClassA.get_concrete_classes()
I'm personally very wary of this kind of magic. Don't put too much of it in your code.
Here is a simple solution using modern Python's (3.6+) __init_subclass__, defined in PEP 487. It allows you to avoid using a metaclass.
class BaseClassA(object):
    _subclasses = []

    @classmethod
    def get_concrete_classes(cls):
        return list(cls._subclasses)

    def __init_subclass__(cls):
        BaseClassA._subclasses.append(cls)

class ConcreteClassA1(BaseClassA):
    pass  # no magic-related code here

class ConcreteClassA2(BaseClassA):
    pass  # no magic-related code here

print(BaseClassA.get_concrete_classes())
You should know that part of the answer you're looking for is built-in. New-style classes automatically keep a weak reference to all of their child classes which can be accessed with the __subclasses__ method:
@classmethod
def getConcreteClasses(cls):
    return cls.__subclasses__()
This won't return sub-sub-classes. If you need those, you can create a recursive generator to get them all:
@classmethod
def getConcreteClasses(cls):
    for c in cls.__subclasses__():
        yield c
        for c2 in c.getConcreteClasses():
            yield c2
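A quick usage sketch of the recursive version, with the class names from the question:

class BaseClassA(object):
    @classmethod
    def getConcreteClasses(cls):
        for c in cls.__subclasses__():
            yield c
            for c2 in c.getConcreteClasses():
                yield c2

class ConcreteClassA1(BaseClassA): pass
class ConcreteClassA2(BaseClassA): pass
class ConcreteClassA3(ConcreteClassA2): pass

print(list(BaseClassA.getConcreteClasses()))
# roughly: [<class 'ConcreteClassA1'>, <class 'ConcreteClassA2'>, <class 'ConcreteClassA3'>]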
Another way to do this, with a decorator, if your subclasses are either not defining __init__ or are calling their parent's __init__:
def lister(cls):
    cls.classes = list()
    cls._init = cls.__init__

    def init(self, *args, **kwargs):
        cls = self.__class__
        if cls not in cls.classes:
            cls.classes.append(cls)
        cls._init(self, *args, **kwargs)
    cls.__init__ = init

    @classmethod
    def getclasses(cls):
        return cls.classes
    cls.getclasses = getclasses
    return cls

@lister
class A(object): pass

class B(A): pass

class C(A):
    def __init__(self):
        super(C, self).__init__()

b = B()
c = C()
c2 = C()

print 'Classes:', c.getclasses()
It will work whether or not the base class defines __init__.
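For what it's worth, running the example above should print something like:

Classes: [<class '__main__.B'>, <class '__main__.C'>]

A itself never appears in the list because it is never instantiated; classes are registered only when the wrapped __init__ runs.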