I am trying to create a wrapper class that behaves almost like the wrapped object. So far, I have come up with the following code:
import functools
import types

method_wrapper = type((None).__str__)

class Box:
    def __new__(cls, obj):
        attrs = {}
        attr_names = dir(obj)
        for attr_name in attr_names:
            attr = obj.__getattribute__(attr_name)
            if isinstance(attr, (types.MethodType, method_wrapper)):
                # Attr is a bound method, ignore self
                @functools.wraps(attr)
                def wrapped_attr(self, *args, **kwargs):
                    return attr(*args, **kwargs)
                attrs[attr_name] = wrapped_attr
            elif isinstance(attr, types.FunctionType):
                # attr is a static method
                attrs[attr_name] = staticmethod(attr)
            else:
                # attr is a plain attribute
                attrs[attr_name] = attr
        cls = type(type(obj).__name__,
                   (cls, type(obj)),
                   attrs)
        return object.__new__(cls)
I tried testing it with:
if __name__ == '__main__':
    x = Box(object())
However, it fails with the following error message:
TypeError: __init__() should return None, not 'NotImplementedType'
__init__ is being properly dispatched by isinstance(attr, (types.MethodType, method_wrapper)), and wrapped_attr seems to be executed. Do you have any idea why this is happening?
The problem is here:
for ...:
    attr = ...
    ...
    def wrapped_attr(...):
        ... attr ...
This doesn't work as expected, because attr is rebound to various values by the for loop. All subfunctions will see the last value bound, not the value it had in that iteration of the loop. In this case, the last value bound, in alphabetical order, is __subclasshook__, which tends to return NotImplemented when called with random arguments.
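The usual fix is to capture the current value of attr at definition time, for example with a default-argument value. A sketch of how the loop body above could be amended:

for attr_name in attr_names:
    attr = obj.__getattribute__(attr_name)
    if isinstance(attr, (types.MethodType, method_wrapper)):
        # Default values are evaluated once, when the def statement runs,
        # so each wrapped_attr keeps the attr of its own iteration.
        def wrapped_attr(self, *args, _attr=attr, **kwargs):
            return _attr(*args, **kwargs)
        attrs[attr_name] = wrapped_attr

An equivalent alternative is a factory function that closes over its own parameter and returns the wrapper.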
Related
I've been using this style of inheritance to validate values set on instances of objects, but I'm wondering if there is a more fluent way to do this.
I'm following a spec where items of a certain classification (Foo) contain elements of a certain composition (Fe).
class Typed:
    def __set__(self, obj, value):
        assert isinstance(value, self._type), 'Incorrect type'

class Integer(Typed):
    _type = int

class Float(Typed):
    _type = float

class Positive(Typed):
    def __set__(self, obj, value):
        super().__set__(obj, value)
        assert value >= 0, 'Positive Values Only Accepted'

class PositiveInteger(Integer, Positive):
    pass

class PositiveFloat(Float, Positive):
    pass

class Sized(Typed):
    def __set__(self, obj, value):
        super().__set__(obj, value)
        assert value <= 2**self.size - 1, f'{value} is too High'

class Fe(Sized, PositiveInteger):
    name = 'Integer, 8 bit unsigned'
    size = 8

class Foo(Fe):
    name = 'Classification1'
    def __set__(self, obj, id):
        super().__set__(obj, id)
        obj._id = id
    def __get__(self, obj, objType=None):
        return obj._id
    def __del__(self):
        pass
If you really need this level of abstraction, this is possibly the best way you can do it. My suggestion below can maybe save one line per class.
If you can afford to have attributes like "size" and "_type" defined
on the final class, a richer base class and a declarative structure containing the checks as lambda functions can be used, like this.
Note the usage of __init_subclass__ to check that all the parameters
needed for the guard expressions are defined:
from typing import Sequence

GUARDS = {
    "typed": ((lambda self, value: "Incorrect type" if not isinstance(value, self._type) else None), ("_type",)),
    "positive": ((lambda self, value: "Only positive values" if value < 0 else None), ()),
    "sized": ((lambda self, value: None if value <= 2 ** self.size - 1 else f"{value} must be smaller than 2**{self.size}"), ("size",)),
}

class DescriptorBase:
    guards: Sequence[str]

    def __init_subclass__(cls):
        _sentinel = object()
        for guard_name in cls.guards:
            guard = GUARDS[guard_name]
            required_attrs = guard[1]
            missing = []
            for attr in required_attrs:
                if getattr(cls, attr, _sentinel) is _sentinel:
                    missing.append(attr)
            if missing:
                raise TypeError(f"Guarded descriptor {cls.__name__} did not declare required attrs: {missing}")

    def __set_name__(self, owner, name):
        self._name = f"_{name}"

    def __set__(self, instance, value):
        errors = []
        for guard_name in self.guards:
            if (error := GUARDS[guard_name][0](self, value)) is not None:
                errors.append(error)
        if errors:
            raise ValueError("\n".join(errors))
        setattr(instance, self._name, value)

    def __get__(self, instance, owner):
        if instance is None:
            return self
        return getattr(instance, self._name)

    def __delete__(self, instance):
        delattr(instance, self._name)

class Foo(DescriptorBase):
    guards = ("typed", "positive", "sized")
    size = 8
    _type = int
    # No other code required here: __get__, __set__, __delete__ handled in superclass

class UseAttr:
    # Actual smart-attr usage:
    my_foo = Foo()
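A quick usage sketch (the failure message comes from the GUARDS table above):

u = UseAttr()
u.my_foo = 10    # passes the typed, positive and sized guards
print(u.my_foo)  # -> 10
u.my_foo = -1    # raises ValueError: Only positive values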
Actually, if you want the class hierarchy with fewer lines (no need to declare a __set__ method in each class), this approach can be used as well:
just change __init_subclass__ to collect "guards" from all superclasses,
consolidate a single guards list on the class being defined, and then
define your composable guard classes simply as:
class Positive(DescriptorBase):
    guards = ("positive",)

class Sized(DescriptorBase):
    guards = ("sized",)
    size = None

class Foo(Positive, Sized):
    size = 8

class Fe(Foo):
    name = "Fe name"
Actually, the change needed for this to work can be as simple as:

def __init_subclass__(cls):
    _sentinel = object()
    all_guards = []
    for supercls in cls.__mro__:
        all_guards.extend(getattr(supercls, "guards", ()))
    # filter unique, preserving order:
    seen = set()
    new_guards = []
    for guard in all_guards:
        if guard not in seen:
            new_guards.append(guard)
            seen.add(guard)
    cls.guards = new_guards
    for guard_name in cls.guards:
        ...  # the required-attrs check proceeds as before
Also note that you could collect the contents of the GUARDS registry from each defined class, instead of having to declare everything as lambdas beforehand. I think you can get the idea from here on.
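A minimal sketch of that idea, assuming a hypothetical convention in which each guard class defines a staticmethod named check (plus the attrs it requires), which __init_subclass__ registers into GUARDS; the names are illustrative, not part of the code above:

class DescriptorBase:
    guards = ()

    def __init_subclass__(cls):
        # Register a check defined on this very class (not inherited)
        # into the shared GUARDS table, keyed by the lowercased class name.
        if "check" in cls.__dict__:
            GUARDS[cls.__name__.lower()] = (cls.check, getattr(cls, "required_attrs", ()))
        # ... guard collection and required-attr validation as before ...

class Positive(DescriptorBase):
    guards = ("positive",)
    @staticmethod
    def check(self, value):
        return "Only positive values" if value < 0 else None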
I have a complex unpickable object that has properties (defined via getters and setters) that are of complex and unpickable type as well. I want to create a multiprocessing proxy for the object to execute some tasks in parallel.
The problem: while I have succeeded in making the getter methods available on the proxy object, I fail to make the getters return proxies for the unpickable return objects.
My setup resembles the following:
from multiprocessing.managers import BaseManager, NamespaceProxy

class A():
    @property
    def a(self):
        return B()

    @property
    def b(self):
        return 2

# unpickable class
class B():
    def __init__(self, *args):
        self.f = lambda: 1
class ProxyBase(NamespaceProxy):
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')

class AProxy(ProxyBase): pass
class BProxy(ProxyBase): pass

class MyManager(BaseManager): pass

MyManager.register('A', A, AProxy)

if __name__ == '__main__':
    with MyManager() as manager:
        myA = manager.A()
        print(myA.b)  # works great
        print(myA.a)  # raises error, because the object B is not pickable
I know that I can specify the result type of a method when registering it with the manager. That is, I can do
MyManager.register('A', A, AProxy, method_to_typeid={'__getattribute__': 'B'})
MyManager.register('B', B, BProxy)

if __name__ == '__main__':
    with MyManager() as manager:
        myA = manager.A()
        print(myA.a)  # works great!
        print(myA.b)  # returns the same as myA.a ?!
It is clear to me why my solution does not work: the method_to_typeid entry for __getattribute__ applies to every attribute access, whereas I only want a proxy for B to be returned when property a is accessed. How could I achieve this?
As a side question: if I remove the *args argument from the __init__ method of B, I get an error that it is called with the wrong number of arguments. Why? How could I resolve this?
I don't think this is possible without some hacks, since the choice to return a value or a proxy is made based on the method name alone, not on the type of the return value (from Server.serve_client):
try:
    res = function(*args, **kwds)
except Exception as e:
    msg = ('#ERROR', e)
else:
    typeid = gettypeid and gettypeid.get(methodname, None)
    if typeid:
        rident, rexposed = self.create(conn, typeid, res)
        token = Token(typeid, self.address, rident)
        msg = ('#PROXY', (rexposed, token))
    else:
        msg = ('#RETURN', res)
Also keep in mind that exposing __getattribute__ in an unpickable class's proxy basically breaks the proxy functionality when calling methods.
But if you're willing to hack it and you just need attribute access, here is a working solution. (Note that calling myA.a.f() still won't work: the lambda is an attribute and is not proxied, only methods are, but that's a different problem.)
import os
from multiprocessing.managers import BaseManager, NamespaceProxy, Server

class A():
    @property
    def a(self):
        return B()

    @property
    def b(self):
        return 2

# unpickable class
class B():
    def __init__(self, *args):
        self.f = lambda: 1
        self.pid = os.getpid()

class HackedObj:
    def __init__(self, obj, gettypeid):
        self.obj = obj
        self.gettypeid = gettypeid

    def __getattribute__(self, attr):
        if attr == '__getattribute__':
            return object.__getattribute__(self, attr)
        obj = object.__getattribute__(self, 'obj')
        result = object.__getattribute__(obj, attr)
        if isinstance(result, B):
            gettypeid = object.__getattribute__(self, 'gettypeid')
            # This tells the server that the return value of this method is
            # B, for which we've registered a proxy.
            gettypeid['__getattribute__'] = 'B'
        return result

class HackedDict:
    def __init__(self, data):
        self.data = data

    def __setitem__(self, key, value):
        self.data[key] = value

    def __getitem__(self, key):
        obj, exposed, gettypeid = self.data[key]
        if isinstance(obj, A):
            gettypeid = gettypeid.copy() if gettypeid else {}
            # Now we need getattr to update gettypeid based on the result;
            # luckily BaseManager queries the typeid info after the function
            # has been invoked
            obj = HackedObj(obj, gettypeid)
        return (obj, exposed, gettypeid)

class HackedServer(Server):
    def __init__(self, registry, address, authkey, serializer):
        super().__init__(registry, address, authkey, serializer)
        self.id_to_obj = HackedDict(self.id_to_obj)

class MyManager(BaseManager):
    _Server = HackedServer

class ProxyBase(NamespaceProxy):
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')

class AProxy(ProxyBase): pass
class BProxy(ProxyBase): pass

MyManager.register('A', callable=A, proxytype=AProxy)
MyManager.register('B', callable=B, proxytype=BProxy)

if __name__ == '__main__':
    print("This process: ", os.getpid())
    with MyManager() as manager:
        myB = manager.B()
        print("Proxy process, using B directly: ", myB.pid)
        myA = manager.A()
        print('myA.b', myA.b)
        print("Proxy process, via A: ", myA.a.pid)
The key to the solution is to replace the _Server in our manager, and then wrap the id_to_obj dict with one that performs the hack for the specific method we need.
The hack consists of populating the gettypeid dict for the method, but only after the method has been evaluated and we know the return type to be one that needs a proxy. And we're lucky in the order of evaluation: gettypeid is accessed after the method has been called.
Also luckily, gettypeid is used as a local in the serve_client method, so we can return a modified copy of it without introducing any concurrency issues.
While this was a fun exercise, I have to say I really advise against this solution. If you're dealing with external code that you cannot modify, you should simply create your own wrapper class that has explicit methods instead of @property accessors, proxy your own class instead, and use method_to_typeid.
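For illustration, a minimal sketch of that recommended shape, using a hypothetical AWrapper with explicit getter methods (the wrapper class and its method names are assumptions, not part of the original code):

class AWrapper:
    def __init__(self):
        self._a = A()

    def get_a(self):      # explicit method instead of a property
        return self._a.a  # returns a B instance

    def get_b(self):
        return self._a.b

# With the default AutoProxy, method calls are forwarded by name, so
# method_to_typeid can single out get_a and return a BProxy for it.
MyManager.register('AWrapper', AWrapper, method_to_typeid={'get_a': 'B'})
MyManager.register('B', B, BProxy)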
I want to change the behavior of isinstance for a live python object.
One solution is to create a simple wrapper like the following, but I do not like it:
class Widget:
    def __init__(self, obj):
        self.inner_self = obj

lizard = ['head', 'nose', 'tail']
wlizard = Widget(lizard)
assert isinstance(wlizard, Widget)  # no assertion error thrown
What I don't like about this particular wrapper is that we must extract the lizard from wlizard before we can use the lizard again:
try:
    wlizard[0]
except Exception:
    print("sorry, wlizard doesn't behave like a lizard")

lizard = wlizard.inner_self
print(lizard[0])  # works fine
What I really want is for wlizard to behave exactly like lizard, except that isinstance returns True for wlizard and False for lizard.
The following sort-of works, but has some drawbacks:
class Widget:
    pass

def MakeAWidget(obj):
    class Blah(type(obj), Widget):
        # inherits type(obj)'s __init__ method
        pass
    wobj = Blah(obj)  # calls type(obj)'s copy constructor
    return wobj
One problem is that this only works if type(obj)'s __init__() method accepts more than just self; in particular, __init__ must accept an instance of type(obj) and, when given one, copy the attributes of obj into self. I would like something that works even if obj's type does not have such a copy constructor. Something like the following might force the existence of a copy constructor:
import copy

class Blah(type(obj), Widget):
    # __init__ cannot return a replacement object, so a
    # copy-construction fallback has to live in __new__
    def __new__(cls, *args, **kwargs):
        if args and isinstance(args[0], type(obj)):
            return copy.deepcopy(args[0])
        return super().__new__(cls, *args, **kwargs)
However, I would rather not copy the object, only modify it in-place.
Something like the following might be possible, but I am not sure what __BLAH__ would be:
obj = ['apple', 'pear', 'banana']
assert not isinstance(obj, Widget)  # no error thrown
obj.__BLAH__.append('Widget')
assert isinstance(obj, Widget)      # no error thrown
Here's something I think does what you want. The wrap() function dynamically creates a class which is derived from the class of the obj argument passed to it, and then returns an instance of that class created from it. This assumes the class of obj supports copy construction (initialization from an instance of the same — or derived — class).
def wrap(obj):
    class MetaClass(type):
        def __new__(mcls, classname, bases, classdict):
            wrapped_classname = '_%s_%s' % ('Wrapped', type(obj).__name__)
            return type.__new__(mcls, wrapped_classname, (type(obj),) + bases, classdict)

    class Wrapped(metaclass=MetaClass):
        pass

    return Wrapped(obj)
lizard = ['head', 'nose', 'tail']
wlizard = wrap(lizard)
print(type(wlizard).__name__)     # -> _Wrapped_list
print(isinstance(wlizard, list))  # -> True

try:
    wlizard[0]
except Exception as exc:
    print(exc)
    print("sorry, wlizard doesn't behave like lizard")
else:
    print('wlizard[0] worked')
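As for the wish to modify the object in place rather than copy it: the closest real mechanism to the hypothetical __BLAH__ is assigning to obj.__class__, which retags an instance without copying. A minimal sketch; note that CPython only allows this between compatible heap types, so it works for instances of user-defined classes but raises TypeError for a plain list like the one above:

class Widget:
    pass

class Lizard:
    def __init__(self):
        self.parts = ['head', 'nose', 'tail']

class WidgetLizard(Lizard, Widget):
    pass

lizard = Lizard()
assert not isinstance(lizard, Widget)
lizard.__class__ = WidgetLizard   # in place: no copy is made
assert isinstance(lizard, Widget)
assert lizard.parts[0] == 'head'  # still behaves like a Lizard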
I think this is exactly what you want. This solution allows you to decorate any object so that the wrapper instance gets all the methods and attributes of the wrapped one.
This is a metaclass of the wrapper:
class WrapperMeta(type):
    @classmethod
    def __new_getattr(mcs, method, inst):
        if method is None:
            method = object.__getattribute__

        def new_method(self, key):
            try:
                return method(self, key)
            except AttributeError:
                return method(inst.wrappee.fget(self), key)
        return new_method

    def __new__(mcs, name, bases, kwargs):
        if not bases:
            bases = (object,)
        if len(bases) != 1:
            raise TypeError("Wrapper can wrap only one class")
        if type(kwargs.get("wrappee")) != property:
            raise AttributeError('wrapper class must have a "wrappee" property')
        inst = type.__new__(mcs, name, bases, kwargs)
        inst.__getattribute__ = mcs.__new_getattr(inst.__getattribute__, inst)
        return inst
It requires the wrapper to have exactly one parent class (the one you want to wrap) and a "wrappee" property, and it overrides __getattribute__ in the way you need.
This is a base class:
class VeryImportantClass:
    def __init__(self):
        self.a = 1

    def very_important_function(self, n):
        return n + self.a
This is a wrapper class:
class Wrapper(VeryImportantClass, metaclass=WrapperMeta):
    def __init__(self, vii):
        self._vii = vii

    @property
    def wrappee(self):
        return self._vii

    def very_important_addition(self, n):
        return n - self.a * 4
And that is the result:
vii = VeryImportantClass()
vii = Wrapper(vii)
print(vii.very_important_function(5))       # 6
print(vii.very_important_addition(1))       # -3
print(isinstance(vii, VeryImportantClass))  # True
I have a class in which a method first needs to verify that an attribute is present and otherwise call a function to compute it. Then, ensuring that the attribute is not None, it performs some operations with it. I can see two slightly different design choices:
class myclass():
    def __init__(self):
        self.attr = None

    def compute_attribute(self):
        self.attr = 1

    def print_attribute(self):
        if self.attr is None:
            self.compute_attribute()
        print(self.attr)
And
class myclass2():
    def __init__(self):
        pass

    def compute_attribute(self):
        self.attr = 1
        return self.attr

    def print_attribute(self):
        try:
            attr = self.attr
        except AttributeError:
            attr = self.compute_attribute()
        if attr is not None:
            print(attr)
In the first design, I need to make sure that all the class attributes are set to None in advance, which can become verbose but also clarifies the structure of the object.
The second choice seems to be the more widely used one. However, for my purposes (scientific computing related to information theory), using try/except blocks everywhere can be overkill, given that this class doesn't really interact with other classes; it just takes data and computes a bunch of things.
Firstly, you can use hasattr to check whether an object has an attribute; it returns True if the attribute exists.
hasattr(object, attribute) # will return True if the object has the attribute
Secondly, you can customise attribute access in Python; you can read more about it here: https://docs.python.org/2/reference/datamodel.html#customizing-attribute-access
Basically, you override the __getattr__ method to achieve this, so something like:
class myclass2():
    def __init__(self):
        pass

    def compute_attr(self):
        self.attr = 1
        return self.attr

    def print_attribute(self):
        print(self.attr)

    def __getattr__(self, name):
        # __getattr__ is only called when normal lookup has already
        # failed, so the attribute is missing; look for a matching
        # compute_<name> method on the class and call it
        compute_method = "compute_" + name
        if compute_method in type(self).__dict__:
            return getattr(self, compute_method)()
        raise AttributeError(name)
Make sure that attribute lookups performed inside __getattr__ cannot themselves fail, or you'll end up with infinite recursion; that is why the code above checks the class __dict__ directly instead of calling hasattr on self.
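A quick usage sketch of the corrected class:

m = myclass2()
m.print_attribute()  # attr is missing, so __getattr__ computes it: prints 1
print(m.attr)        # now stored on the instance: prints 1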
Based on the answer jonrsharpe linked, I offer a third design choice. The idea here is that no special conditional logic is required at all either by the clients of MyClass or by code within MyClass itself. Instead, a decorator is applied to a function that does the (hypothetically expensive) computation of the property, and then that result is stored.
This means that the expensive computation is done lazily (only if a client tries to access the property) and only performed once.
def lazyprop(fn):
    attr_name = '_lazy_' + fn.__name__

    @property
    def _lazyprop(self):
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)
    return _lazyprop

class MyClass(object):
    @lazyprop
    def attr(self):
        print('Generating attr')
        return 1

    def __repr__(self):
        return str(self.attr)

if __name__ == '__main__':
    o = MyClass()
    print(o.__dict__, end='\n\n')
    print(o, end='\n\n')
    print(o.__dict__, end='\n\n')
    print(o)
Output
{}
Generating attr
1
{'_lazy_attr': 1}
1
Edit
Application of Cyclone's answer to OP's context:
class lazy_property(object):
    '''
    Meant to be used for lazy evaluation of an object attribute.
    The property should represent non-mutable data, as it replaces itself.
    '''
    def __init__(self, fget):
        self.fget = fget
        self.func_name = fget.__name__

    def __get__(self, obj, cls):
        if obj is None:
            return None
        value = self.fget(obj)
        setattr(obj, self.func_name, value)
        return value

class MyClass(object):
    @lazy_property
    def attr(self):
        print('Generating attr')
        return 1

    def __repr__(self):
        return str(self.attr)

if __name__ == '__main__':
    o = MyClass()
    print(o.__dict__, end='\n\n')
    print(o, end='\n\n')
    print(o.__dict__, end='\n\n')
    print(o)
The output is identical to above.
I'm trying to create a wrapper that blocks the execution of some methods. The classic solution is to use this pattern:
class RestrictingWrapper(object):
    def __init__(self, w, block):
        self._w = w
        self._block = block

    def __getattr__(self, n):
        if n in self._block:
            raise AttributeError, n
        return getattr(self._w, n)
The problem with this solution is the overhead it introduces in every call, so I am trying to use a metaclass to accomplish the same task. Here is my solution:
class RestrictingMetaWrapper(type):
    def __new__(cls, name, bases, dic):
        wrapped = dic['_w']
        block = dic.get('_block', [])
        new_class_dict = {}
        new_class_dict.update(wrapped.__dict__)
        for attr_to_block in block:
            del new_class_dict[attr_to_block]
        new_class_dict.update(dic)
        return type.__new__(cls, name, bases, new_class_dict)
Works perfectly with simple classes:
class A(object):
    def __init__(self, i):
        self.i = i

    def blocked(self):
        return 'BAD: executed'

    def no_blocked(self):
        return 'OK: executed'

class B(object):
    __metaclass__ = RestrictingMetaWrapper
    _w = A
    _block = ['blocked']

b = B('something')
b.no_blocked  # 'OK: executed'
b.blocked     # OK: AttributeError: 'B' object has no attribute 'blocked'
The problem comes with 'more complex' classes like ndarray from numpy:
import numpy as np

class NArray(object):
    __metaclass__ = RestrictingMetaWrapper
    _w = np.ndarray
    _block = ['max']

na = NArray()        # OK
na.max()             # OK: AttributeError: 'NArray' object has no attribute 'max'
na = NArray([3, 3])  # TypeError: object.__new__() takes no parameters
na.min()             # TypeError: descriptor 'min' for 'numpy.ndarray' objects doesn't apply to 'NArray' object
I assume that my metaclass is not well defined, because other classes (e.g. pandas.Series) suffer weird errors, like not blocking the indicated methods.
Can you spot where the error is? Any other ideas for solving this problem?
UPDATE:
nneonneo's solution works great, but it seems wrapped classes can break the blocker with some black magic inside the class definition.
Using nneonneo's solution:
import pandas

@restrict_methods('max')
class Row(pandas.Series):
    pass

r = Row([1, 2, 3])
r.max()  # BAD: 3, AttributeError expected
As it says in the TypeError, min (and related functions) will only work on instances of np.ndarray; thus, the new subclass must inherit from the class you are trying to wrap.
Then, since you extend the base class, you have to replace the blocked methods with a suitable descriptor:
class RestrictedMethod(object):
    def __get__(self, obj, objtype):
        raise AttributeError("Access denied.")

class RestrictingMetaWrapper(type):
    def __new__(cls, name, bases, dic):
        block = dic.get('_block', [])
        for attr in block:
            dic[attr] = RestrictedMethod()
        return type.__new__(cls, name, bases, dic)  # note we inject the base class here

class NArray(np.ndarray):
    __metaclass__ = RestrictingMetaWrapper
    _block = ['max']
Note: enterprising users can still reach the "restricted" functionality through the base class methods (e.g. np.ndarray.max(na)).
EDIT: Simplified the wrapper and made it transparently subclassable.
Note that this can all be done in a simpler way using a class decorator:
class RestrictedMethod(object):
    def __get__(self, obj, objtype):
        raise AttributeError("Access denied.")

def restrict_methods(*args):
    def wrap(cls):
        for attr in args:
            setattr(cls, attr, RestrictedMethod())
        return cls
    return wrap

@restrict_methods('max', 'abs')
class NArray(np.ndarray):
    pass
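A quick usage sketch, assuming the usual ndarray construction from a shape tuple:

import numpy as np

na = NArray((2, 2))  # ndarray subclasses are constructed from a shape
na.min()             # fine: min is not blocked
try:
    na.max()
except AttributeError as e:
    print(e)         # -> Access denied.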