Wrap all methods of a Python superclass

Is there a way to wrap all methods of a superclass, if I can't change its code?
As a minimal working example, consider this base class Base, which has many methods that return a new instance of itself, and the descendant class Child:
import math

class Base:
    def __init__(self, val):
        self.val = val
    def newinst_addseven(self):
        return Base(self.val + 7)
    def newinst_timestwo(self):
        return Base(self.val * 2)
    # ...

class Child(Base):
    @property
    def sqrt(self):
        return math.sqrt(self.val)
The issue here is that calling childinstance.newinst_addseven() returns an instance of Base, instead of Child.
Is there a way to wrap the Base class's methods to force a return value of the type Child?
With something like this wrapper:
def force_child_i(result):
    """Turn Base instance into Child instance."""
    if type(result) is Base:
        return Child(result.val)
    return result

def force_child_f(fun):
    """Turn from Base- to Child-instance-returning function."""
    def wrapper(*args, **kwargs):
        result = fun(*args, **kwargs)
        return force_child_i(result)
    return wrapper
Many thanks!
PS: What I currently do is look at Base's source code and add the methods to Child directly, which is not very maintainable:
Child.newinst_addseven = force_child_f(Base.newinst_addseven)
Child.newinst_timestwo = force_child_f(Base.newinst_timestwo)

One option is to use a metaclass:
class ChildMeta(type):
    def __new__(cls, name, bases, dct):
        child = super().__new__(cls, name, bases, dct)
        for base in bases:
            for field_name, field in base.__dict__.items():
                if callable(field):
                    setattr(child, field_name, force_child(field))
        return child

class Child(Base, metaclass=ChildMeta):
    pass
It will automatically wrap all of Base's methods with your force_child_f wrapper (called force_child in the snippet above).
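For instance, a quick sanity-check sketch (here force_child is assumed to be the force_child_f wrapper from the question, and Child is re-declared with the question's sqrt property for the demo):

force_child = force_child_f  # the answer's name for the question's wrapper

class Child(Base, metaclass=ChildMeta):
    @property
    def sqrt(self):
        return math.sqrt(self.val)

c = Child(3)
d = c.newinst_addseven()
print(type(d))   # the Child class, not Base
print(d.sqrt)    # math.sqrt(10), available because d really is a Child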

Related

How do I access outer class properties inside inner classes?

class Remote:
    aa = 7
    def __init__(self):
        self.name = "Lenovo"
        self.b = self.Battery()
        print("this is outer", self.b.t)

    class Battery:
        def __init__(self):
            self.name = "Hp"
            self.t = "df"
            self.c = self.Cover()

        class Cover:
            def __init__(self):
                self.name = "Arplastic"

c1 = Remote()
I just learned about inner classes today, but I don't know how to access the properties and methods of the outer class from inside the inner classes. Can anyone explain how?
Change the constructor(s) of the inner class(es) to accept a parent argument and have the creating instance pass itself to it:
class Remote:
    aa = 7
    def __init__(self):
        self.name = "Lenovo"
        self.b = self.Battery(self)
        print("this is outer", self.b.t)

    class Battery:
        def __init__(self, parent):
            self.name = "Hp"
            self.t = "df"
            self.c = self.Cover(self)
            self.parent = parent

        class Cover:
            def __init__(self, parent):
                self.name = "Arplastic"
                self.parent = parent

c1 = Remote()
print(c1.b.c.parent.parent.name)  # prints 'Lenovo'
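With the parent reference in place, inner-class methods can reach the outer instance through self.parent. A small illustrative sketch (owner_name is a hypothetical helper, not part of the original code):

class Remote:
    def __init__(self):
        self.name = "Lenovo"
        self.b = self.Battery(self)   # hand the outer instance to the inner class

    class Battery:
        def __init__(self, parent):
            self.parent = parent

        def owner_name(self):
            return self.parent.name   # read an attribute of the outer Remote

print(Remote().b.owner_name())        # 'Lenovo'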
One approach is to make a metaclass that automatically creates self.parent attributes for nested classes. Note that there is a trade-off between readability and boilerplate here - many programmers would rather you just manually pass parents as arguments and add them to __init__ methods. This is more fun though, and there is something to be said for having less cluttered code.
Here is the code:
import inspect

def inner_class(cls):
    cls.__is_inner_class__ = True
    return cls

class NestedClass(type):
    def __new__(metacls, name, bases, attrs, parent=None):
        attrs = dict(attrs.items())
        super_getattribute = attrs.get('__getattribute__', object.__getattribute__)
        inner_class_cache = {}

        def __getattribute__(self, attr):
            val = super_getattribute(self, attr)
            if inspect.isclass(val) and getattr(val, '__is_inner_class__', False):
                if (self, val) not in inner_class_cache:
                    inner_class_cache[self, val] = NestedClass(
                        val.__name__, val.__bases__, val.__dict__, parent=self)
                return inner_class_cache[self, val]
            else:
                return val

        attrs['__getattribute__'] = __getattribute__
        attrs['parent'] = parent
        return type(name, bases, attrs)

class Remote(metaclass=NestedClass):
    aa = 7

    def __init__(self):
        self.name = "Lenovo"
        self.b = self.Battery()
        print("this is outer", self.b.t)

    @inner_class
    class Battery:
        def __init__(self):
            self.name = "Hp"
            self.t = "df"
            self.c = self.Cover()

        @inner_class
        class Cover:
            def __init__(self):
                self.name = "Arplastic"
                print(f'{self.parent=}, {self.parent.parent=}')

c1 = Remote()
print(f'{c1.b.c.parent.parent is c1=}')
print(f'{isinstance(c1.b, c1.Battery)=}')
Output:
self.parent=<__main__.Battery object at 0x7f11e74936a0>, self.parent.parent=<__main__.Remote object at 0x7f11e7493730>
this is outer df
c1.b.c.parent.parent is c1=True
isinstance(c1.b, c1.Battery)=True
The way this works is by storing the parent as a class attribute (which is None by default), and replacing the __getattribute__ method so that all inner classes are replaced with NestedClasses with the parent attribute correctly filled in.
The inner_class decorator is used to mark a class as an inner class by setting the __is_inner_class__ attribute.
def inner_class(cls):
    cls.__is_inner_class__ = True
    return cls
This is not strictly necessary if all attributes that are classes should be treated as inner classes, but it is good practice to do something like this to prevent Bar.foo from being treated as an inner class in this example:
class Foo:
    pass

class Bar(metaclass=NestedClass):
    foo = Foo
All the NestedClass metaclass does is take the description of the class and modify it, adding the parent attribute:
class NestedClass(type):
    def __new__(metacls, name, bases, attrs, parent=None):
        attrs = dict(attrs.items())
        ...
        attrs['parent'] = parent
        return type(name, bases, attrs)
...and modifying the __getattribute__ method. The __getattribute__ method is a special method that gets called every time an attribute is accessed. For example:
class Foo:
    def __init__(self):
        self.bar = "baz"
    def __getattribute__(self, item):
        return 1

foo = Foo()
# These assert statements pass: even though `foo.bar` is set to "baz" and
# `foo.remote` doesn't exist, accessing either of them is the same as calling
# `Foo.__getattribute__(foo, ...)`, which always returns 1.
assert foo.bar == 1
assert foo.remote == 1
So, by modifying the __getattribute__ method, you can make accessing self.Battery return a class that has its parent attribute equal to self, and also make it into a nested class:
class NestedClass(type):
    def __new__(metacls, name, bases, attrs, parent=None):
        attrs = dict(attrs.items())
        # get the previous __getattribute__ in case it was not the default one
        super_getattribute = attrs.get('__getattribute__', object.__getattribute__)
        inner_class_cache = {}

        def __getattribute__(self, attr):
            # get the attribute
            val = super_getattribute(self, attr)
            if inspect.isclass(val) and getattr(val, '__is_inner_class__', False):
                # if it is an inner class, make a new version of it using the
                # NestedClass metaclass, setting the parent attribute
                if (self, val) not in inner_class_cache:
                    inner_class_cache[self, val] = NestedClass(
                        val.__name__, val.__bases__, val.__dict__, parent=self)
                return inner_class_cache[self, val]
            else:
                return val

        attrs['__getattribute__'] = __getattribute__
        attrs['parent'] = parent
        return type(name, bases, attrs)
Note that a cache is used to ensure that self.Battery will always return the same object every time rather than re-making the class every time it is called. This ensures that checks like isinstance(c1.b, c1.Battery) work correctly, since otherwise c1.Battery would return a different object to the one used to create c1.b, causing this to return False, when it should return True.
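A minimal sketch of the failure mode the cache prevents (make_class here is a hypothetical stand-in for re-creating the inner class on each attribute access):

# Stand-in for rebuilding the nested class every time it is accessed.
def make_class():
    return type('Battery', (), {})

BatteryA = make_class()
BatteryB = make_class()   # a second, distinct class object with the same name
b = BatteryA()
print(isinstance(b, BatteryA))  # True: same class object
print(isinstance(b, BatteryB))  # False: a re-created class is a different object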
And that's it! You can now enjoy nested classes without boilerplate!

Python: Arbitrary Class Modifier

While I managed to construct a manageable answer to my question:
class A(object):
    def __init__(self, B, *args, **kwargs):
        '''Modifies input class B, rendering it a super class of class A.'''
        _parent = {}
        method_list = [func for func in dir(self) if callable(getattr(self, func))]
        for att in B.__dir__():
            _parent[att] = B.__getattribute__(att)
            if att not in method_list:
                try:
                    self.__setattr__(att, B.__getattribute__(att))
                except:
                    pass
        B.__init__(self, *args, **kwargs)
        self.__parent__ = _parent
        # add self variables here

    def func(self):
        # modify inherited func here
        self.__parent__['func']()
        # modify inherited func here
I do not know whether it always works, and I would like to know if someone else has a better solution to this question (which is rather trivial in other languages). Also, this solution is only applicable in Python 3 and above (otherwise the inspect module is needed to replace the callable check).

Python shared property parent/child

Embarrassed to ask, but I am using webapp2 and I am templating out a solution to make it easier to define routes, based on this Google webapp2 route function. It all depends on being able to define TYPE_NAME at the child level. The idea is that the parent sets everything up and the child just needs to implement the _list function. The issue I ran into is that TYPE_NAME is None and I need it to be the child's value.
# main WSGI is extended to have this function
class WSGIApplication(webapp2.WSGIApplication):
    def route(self, *args, **kwargs):
        def wrapper(func):
            self.router.add(webapp2.Route(handler=func, *args, **kwargs))
            return func
        return wrapper

from main import application

class ParentHandler(RequestHandler):
    TYPE_NAME = None

    @application.route('/', name="list_%s" % TYPE_NAME)
    def list(self):
        return self._list()

class ChildHandler(ParentHandler):
    TYPE_NAME = 'child'

    def _list(self):
        return []
I have tried a couple of solutions using "class properties", but they didn't pan out. I'm open to other ideas; I basically just need the child class to inherit the decorated properties and execute them.
Edit:
For all of those on the edge of their seats wondering how I fixed this: I was not able to get everything I needed out of the decorator, so I ended up using a metaclass. I also added a _URLS parameter to allow for adding additional "routes"; it maps a custom function to the route. I really wanted to use a decorator but couldn't get it to work.
class RequestURLMeta(type):
    def __new__(mcs, name, bases, dct):
        result = super(RequestURLMeta, mcs).__new__(mcs, name, bases, dct)
        urls = getattr(result, '_URLS', {}) or {}
        for k, v in urls.iteritems():
            template = v.pop('template')
            app.route(getattr(result, k), template, **v)
        if getattr(result, 'TYPE_NAME', None):
            app.route(result.list, result.ROOT_PATH, methods=['GET'],
                      name="%s" % result.TYPE_NAME)
        # other ones went here..
        return result

class ParentHandler(RequestHandler):
    __metaclass__ = RequestURLMeta

class ChildHandler(ParentHandler):
    TYPE_NAME = 'child'
    _URLS = {'custom': '/custom', 'TYPE_NAME': 'custom_test'}

    def _list(self):
        return []

    def custom(self):
        pass
I think to get this to work you are going to need to use a metaclass. It might look something like the following (untested):
from main import application

class RouteMeta(type):
    def __new__(mcs, name, bases, dct):
        type_name = dct.get("TYPE_NAME")
        if type_name is not None:
            @application.route('/', type_name)
            def list(self):
                return self._list()
            dct["list"] = list
        return super(RouteMeta, mcs).__new__(mcs, name, bases, dct)

class ParentHandler(RequestHandler):
    __metaclass__ = RouteMeta

class ChildHandler(ParentHandler):
    TYPE_NAME = 'child'

    def _list(self):
        return []
Instead of the list() method being an attribute of ParentHandler, it is dynamically created for classes that inherit from ParentHandler and have TYPE_NAME defined.
If RequestHandler also uses a custom metaclass, have RouteMeta inherit from RequestHandler.__metaclass__ instead of type.
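A minimal sketch of that variant (assuming RequestHandler really does define a custom metaclass; type(RequestHandler) returns it, and deriving from it avoids a metaclass conflict; the route call here mirrors the question's keyword form):

class RouteMeta(type(RequestHandler)):      # instead of: class RouteMeta(type)
    def __new__(mcs, name, bases, dct):
        type_name = dct.get("TYPE_NAME")
        if type_name is not None:
            def list(self):
                return self._list()
            dct["list"] = application.route('/', name="list_%s" % type_name)(list)
        return super(RouteMeta, mcs).__new__(mcs, name, bases, dct)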
This code:
@application.route('/', name="list_%s" % TYPE_NAME)
def list(self):
    ...
is semantically identical to this one:
def list(self):
    ...
list = application.route('/', name="list_%s" % TYPE_NAME)(list)
i.e. the route method is called inside the ParentHandler scope, at class definition time, when TYPE_NAME is still None, so whatever lazy trick you try, it will not work. You should try something different:
from main import application

def route_list(klass):
    klass.list = application.route('/', name="list_%s" % klass.TYPE_NAME)(klass.list)
    return klass

class ParentHandler(RequestHandler):
    def list(self):
        return self._list()

class ChildHandler(ParentHandler):
    TYPE_NAME = 'child'

    def _list(self):
        return []

# in Python 3 this would be:
# @route_list
# class ChildHandler(ParentHandler):
#     ...
ChildHandler = route_list(ChildHandler)

Python pattern's RestrictingWrapper with metaclass

I'm trying to create a wrapper that blocks the execution of some methods. The classic solution is to use this pattern:
class RestrictingWrapper(object):
    def __init__(self, w, block):
        self._w = w
        self._block = block

    def __getattr__(self, n):
        if n in self._block:
            raise AttributeError, n
        return getattr(self._w, n)
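For context, a usage sketch of that proxy (numpy is just an illustrative wrapped object; every delegated access goes through __getattr__):

import numpy as np

arr = RestrictingWrapper(np.arange(5), ['max'])
print(arr.min())  # 0 -- forwarded to the wrapped ndarray via __getattr__
arr.max           # raises AttributeError: the blocked name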
The problem with this solution is the overhead it introduces on every call, so I am trying to use a metaclass to accomplish the same task. Here is my solution:
class RestrictingMetaWrapper(type):
    def __new__(cls, name, bases, dic):
        wrapped = dic['_w']
        block = dic.get('_block', [])
        new_class_dict = {}
        new_class_dict.update(wrapped.__dict__)
        for attr_to_block in block:
            del new_class_dict[attr_to_block]
        new_class_dict.update(dic)
        return type.__new__(cls, name, bases, new_class_dict)
Works perfectly with simple classes:
class A(object):
    def __init__(self, i):
        self.i = i
    def blocked(self):
        return 'BAD: executed'
    def no_blocked(self):
        return 'OK: executed'

class B(object):
    __metaclass__ = RestrictingMetaWrapper
    _w = A
    _block = ['blocked']

b = B('something')
b.no_blocked  # 'OK: executed'
b.blocked     # OK: AttributeError: 'B' object has no attribute 'blocked'
The problem comes with 'more complex' classes like ndarray from numpy:
import numpy as np

class NArray(object):
    __metaclass__ = RestrictingMetaWrapper
    _w = np.ndarray
    _block = ['max']

na = NArray()       # OK
na.max()            # OK: AttributeError: 'NArray' object has no attribute 'max'
na = NArray([3,3])  # TypeError: object.__new__() takes no parameters
na.min()            # TypeError: descriptor 'min' for 'numpy.ndarray' objects doesn't apply to 'NArray' object
I assume that my metaclass is not well defined because other classes (ex: pandas.Series) suffer weird errors, like not blocking the indicated methods.
Could you find where the error is? Any other idea to solve this problem?
UPDATE:
nneonneo's solution works great, but it seems that wrapped classes can break the blocker with some black magic inside the class definition.
Using nneonneo's solution:
import pandas

@restrict_methods('max')
class Row(pandas.Series):
    pass

r = Row([1,2,3])
r.max()  # BAD: 3, AttributeError expected
As it says in the TypeError, min (and related functions) will only work on instances of np.ndarray; thus, the new subclass must inherit from the class you are trying to wrap.
Then, since you extend the base class, you have to replace the methods with a suitable descriptor:
class RestrictedMethod(object):
    def __get__(self, obj, objtype):
        raise AttributeError("Access denied.")

class RestrictingMetaWrapper(type):
    def __new__(cls, name, bases, dic):
        block = dic.get('_block', [])
        for attr in block:
            dic[attr] = RestrictedMethod()
        return type.__new__(cls, name, bases, dic)  # note we inject the base class here

class NArray(np.ndarray):
    __metaclass__ = RestrictingMetaWrapper
    _block = ['max']
Note: enterprising applications can still access "restricted" functionality through the base class methods (e.g. np.ndarray.max(na)).
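For example (illustrative, assuming the NArray subclass defined above and numpy imported as np):

na = NArray(3)       # an ndarray subclass instance of shape (3,)
np.ndarray.max(na)   # bypasses the RestrictedMethod descriptor on NArray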
EDIT: Simplified the wrapper and made it transparently subclassable.
Note that this can all be done in a simpler way using a class decorator:
import numpy as np

class RestrictedMethod(object):
    def __get__(self, obj, objtype):
        raise AttributeError("Access denied.")

def restrict_methods(*args):
    def wrap(cls):
        for attr in args:
            setattr(cls, attr, RestrictedMethod())
        return cls
    return wrap

@restrict_methods('max', 'abs')
class NArray(np.ndarray):
    pass
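A quick usage sketch of the decorated class (the actual array contents are whatever happens to be in the uninitialised buffer):

na = NArray(3)   # ndarray-style constructor: shape (3,), contents uninitialised
na.min()         # still reachable through the ndarray base class
na.max           # raises AttributeError("Access denied.")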

Python: How to register all child classes with the parent class upon creation

I have Python class trees, each made up of an abstract base class and many derived concrete classes. I want all concrete classes to be accessible through a base-class method, and I do not want to specify anything during child-class creation.
This is what my imagined solution looks like:
class BaseClassA(object):
    # <some magic code around here>

    @classmethod
    def getConcreteClasses(cls):
        # <some magic related code here>

class ConcreteClassA1(BaseClassA):
    # no magic-related code here

class ConcreteClassA2(BaseClassA):
    # no magic-related code here
As much as possible, I'd prefer to write the "magic" once as a sort of design pattern. I want to be able to apply it to different class trees in different scenarios (i.e. add a similar tree with "BaseClassB" and its concrete classes).
Thanks Internet!
You can use metaclasses for that:
class AutoRegister(type):
    def __new__(mcs, name, bases, classdict):
        new_cls = type.__new__(mcs, name, bases, classdict)
        # print mcs, name, bases, classdict
        for b in bases:
            if hasattr(b, 'register_subclass'):
                b.register_subclass(new_cls)
        return new_cls

class AbstractClassA(object):
    __metaclass__ = AutoRegister
    _subclasses = []

    @classmethod
    def register_subclass(klass, cls):
        klass._subclasses.append(cls)

    @classmethod
    def get_concrete_classes(klass):
        return klass._subclasses

class ConcreteClassA1(AbstractClassA):
    pass

class ConcreteClassA2(AbstractClassA):
    pass

class ConcreteClassA3(ConcreteClassA2):
    pass

print AbstractClassA.get_concrete_classes()
I'm personally very wary of this kind of magic. Don't put too much of this in your code.
Here is a simple solution using modern Python's (3.6+) __init_subclass__ hook, defined in PEP 487. It allows you to avoid using a metaclass.
class BaseClassA(object):
    _subclasses = []

    @classmethod
    def get_concrete_classes(cls):
        return list(cls._subclasses)

    def __init_subclass__(cls):
        BaseClassA._subclasses.append(cls)

class ConcreteClassA1(BaseClassA):
    pass  # no magic-related code here

class ConcreteClassA2(BaseClassA):
    pass  # no magic-related code here

print(BaseClassA.get_concrete_classes())
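Because __init_subclass__ is inherited, deeper subclasses register themselves too; for example (ConcreteClassA3 is illustrative, not from the question):

class ConcreteClassA3(ConcreteClassA1):
    pass  # registered as well: __init_subclass__ also fires for grandchildren

print(BaseClassA.get_concrete_classes())
# now lists ConcreteClassA1, ConcreteClassA2 and ConcreteClassA3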
You should know that part of the answer you're looking for is built in. New-style classes automatically keep weak references to all of their child classes, which can be accessed with the __subclasses__ method:
@classmethod
def getConcreteClasses(cls):
    return cls.__subclasses__()
This won't return sub-sub-classes. If you need those, you can create a recursive generator to get them all:
@classmethod
def getConcreteClasses(cls):
    for c in cls.__subclasses__():
        yield c
        for c2 in c.getConcreteClasses():
            yield c2
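Attached to the question's BaseClassA, the generator then walks the whole tree; a small illustrative sketch (ConcreteClassA1a is hypothetical):

class BaseClassA(object):
    @classmethod
    def getConcreteClasses(cls):
        for c in cls.__subclasses__():
            yield c
            for c2 in c.getConcreteClasses():
                yield c2

class ConcreteClassA1(BaseClassA): pass
class ConcreteClassA2(BaseClassA): pass
class ConcreteClassA1a(ConcreteClassA1): pass  # a grandchild, for illustration

print(list(BaseClassA.getConcreteClasses()))
# depth-first: [ConcreteClassA1, ConcreteClassA1a, ConcreteClassA2]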
Another way to do this is with a decorator; it works if your subclasses either do not define __init__ or call their parent's __init__:
def lister(cls):
    cls.classes = list()
    cls._init = cls.__init__

    def init(self, *args, **kwargs):
        cls = self.__class__
        if cls not in cls.classes:
            cls.classes.append(cls)
        cls._init(self, *args, **kwargs)
    cls.__init__ = init

    @classmethod
    def getclasses(cls):
        return cls.classes
    cls.getclasses = getclasses

    return cls

@lister
class A(object): pass

class B(A): pass

class C(A):
    def __init__(self):
        super(C, self).__init__()

b = B()
c = C()
c2 = C()
print 'Classes:', c.getclasses()
It will work whether or not the base class defines __init__.
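It does rely on the caveat stated above, though. A short sketch of what goes wrong when a subclass overrides __init__ without calling the parent's (class D here is illustrative, building on A from the example above):

class D(A):
    def __init__(self):      # does not call the parent's __init__
        self.ready = True

d = D()
print(A.getclasses())        # D is missing: the patched __init__ never ran for it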
