I have a class in which a method first needs to verify that an attribute is present and otherwise call a function to compute it. Then, ensuring that the attribute is not None, it performs some operations with it. I can see two slightly different design choices:
class myclass():
    """Design 1 from the question: every lazy attribute is pre-declared as
    None in __init__, and accessors compute it on first use."""

    def __init__(self):
        # Sentinel: None means "not computed yet".
        self.attr = None

    def compute_attribute(self):
        """Compute and store the attribute (placeholder computation)."""
        self.attr = 1

    def print_attribute(self):
        """Print the attribute, computing it first if still unset."""
        if self.attr is None:
            self.compute_attribute()
        # print(x) with a single argument behaves identically under
        # Python 2 and 3, unlike the original `print x` statement.
        print(self.attr)
And
class myclass2():
    """Design 2 from the question: the attribute is not pre-declared; the
    accessor uses EAFP (try/except AttributeError) and computes on demand."""

    def __init__(self):
        pass

    def compute_attribute(self):
        """Compute, store and return the attribute (placeholder)."""
        self.attr = 1
        return self.attr

    def print_attribute(self):
        """Print the attribute, computing it on first access."""
        try:
            attr = self.attr
        except AttributeError:
            attr = self.compute_attribute()
        if attr is not None:
            # Single-argument print() is valid on both Python 2 and 3,
            # unlike the original `print attr` statement.
            print(attr)
In the first design, I need to make sure that all the class attributes are set to None in advance, which can become verbose but also clarify the structure of the object.
The second choice seems to be the more widely used one. However, for my purposes (scientific computing related to information theory) using try except blocks everywhere can be a bit of an overkill given that this class doesn't really interact with other classes, it just takes data and computes a bunch of things.
Firstly, you can use hasattr to check if an object has an attribute, it returns True if the attribute exists.
hasattr(object, attribute) # will return True if the object has the attribute
Secondly, you can customise attribute access in Python; you can read more about it here: https://docs.python.org/2/reference/datamodel.html#customizing-attribute-access
Basically, you override the __getattr__ method to achieve this, so something like:
class myclass2():
    """Variant where __getattr__ computes missing attributes on demand.

    Fixes over the posted snippet:
    * `init` renamed to `__init__` (the posted name was never called);
    * the stray colon after `return getattr(self, name):` (syntax error);
    * `hasattr(self, name)` inside __getattr__ removed -- __getattr__ only
      runs when normal lookup has already failed, so re-testing the same
      name recurses infinitely;
    * a missing name now raises AttributeError instead of silently
      returning None, so hasattr()/getattr() behave normally.
    """

    def __init__(self):
        pass

    def compute_attr(self):
        """Compute, store and return `attr` (placeholder computation)."""
        self.attr = 1
        return self.attr

    def print_attribute(self):
        # First access triggers __getattr__, which calls compute_attr.
        print(self.attr)

    def __getattr__(self, name):
        # Look the compute_<name> method up on the class: class lookup never
        # re-enters this instance-level hook, so there is no recursion.
        compute = getattr(type(self), "compute_" + name, None)
        if compute is not None:
            return compute(self)
        raise AttributeError(name)
Make sure you only use getattr to access the attribute within __getattr__ or you'll end up with infinite recursion
Based on the answer jonrsharpe linked, I offer a third design choice. The idea here is that no special conditional logic is required at all either by the clients of MyClass or by code within MyClass itself. Instead, a decorator is applied to a function that does the (hypothetically expensive) computation of the property, and then that result is stored.
This means that the expensive computation is done lazily (only if a client tries to access the property) and only performed once.
def lazyprop(fn):
    """Decorator: turn `fn` into a lazily-evaluated, cached read-only property.

    The first access calls `fn` and stores the result on the instance under
    a private '_lazy_<name>' attribute; later accesses return the cached
    value without calling `fn` again.
    """
    attr_name = '_lazy_' + fn.__name__

    @property  # the `#property` in the post is a mangled `@property`
    def _lazyprop(self):
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)
    return _lazyprop
class MyClass(object):
    """Demo class: `attr` is computed lazily (and only once) via @lazyprop."""

    @lazyprop  # the post showed `#lazyprop`: the `@` was mangled in transit
    def attr(self):
        print('Generating attr')
        return 1

    def __repr__(self):
        # Accessing self.attr here triggers the lazy computation.
        return str(self.attr)


if __name__ == '__main__':
    o = MyClass()
    print(o.__dict__, end='\n\n')   # {} -- nothing cached yet
    print(o, end='\n\n')            # triggers 'Generating attr', prints 1
    print(o.__dict__, end='\n\n')   # {'_lazy_attr': 1}
    print(o)                        # cached: no 'Generating attr' again
Output
{}
Generating attr
1
{'_lazy_attr': 1}
1
Edit
Application of Cyclone's answer to OP's context:
class lazy_property(object):
    '''
    Non-data descriptor for lazy evaluation of an object attribute.

    The decorated method is called once on first access; its result is then
    stored on the instance under the same name, which shadows this
    class-level descriptor, so later accesses are plain attribute reads.
    The property should represent non-mutable data, as it replaces itself.
    '''
    def __init__(self, fget):
        self.fget = fget
        # Cache under the wrapped function's own name so the instance
        # attribute shadows the descriptor after the first access.
        self.func_name = fget.__name__

    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class, not an instance: return the descriptor
            # itself, per the standard descriptor convention (the original
            # returned None, which breaks class-level introspection).
            return self
        value = self.fget(obj)
        setattr(obj, self.func_name, value)
        return value
class MyClass(object):
    """Demo class: `attr` is computed lazily once via lazy_property."""

    @lazy_property  # the post showed `#lazy_property`: the `@` was mangled
    def attr(self):
        print('Generating attr')
        return 1

    def __repr__(self):
        return str(self.attr)


if __name__ == '__main__':
    o = MyClass()
    print(o.__dict__, end='\n\n')   # {} -- descriptor not yet triggered
    print(o, end='\n\n')            # 'Generating attr' then 1
    print(o.__dict__, end='\n\n')   # {'attr': 1} -- now shadowed on the instance
    print(o)                        # plain attribute read; no recomputation
The output is identical to above.
I have a big class which has a lot of functions and attributes. The instances are created from data in a remote database, and the process of creating each instance is very long and heavy. For performance's sake I've created a Bunch class from this heavy class, so accessing the attributes is easy and works great. The problem is how to use the methods from that class. For example:
# NOTE(review): the question's non-working sketch, kept verbatim (Python 2).
# clsA copies a single attribute out of a pre-built heavy instance.
class clsA():
def __init__(self,obj):
self.attrA=obj.attrA
def someFunc(self):
print self
# bunchClsA wraps a third-party Bunch (a dict with attribute access);
# `bunch` here is presumably the imported Bunch class -- TODO confirm.
class bunchClsA(bunch):
def __getattr__(self, attr):
# this is the problem: `clsA.attr` looks up an attribute literally named
# "attr" on the class (getattr(clsA, attr) would be needed), and the bare
# except's fallback `self.attr` re-enters __getattr__ and recurses.
try:
#try and return a func
func = clsA.attr
return func
except:
# return simple attribute
return self.attr
Clearly this doesn't work. Is there a way I could access the instance methods statically and override the "self" variable?
I found a nice solution to the problem:
from bunch import Bunch
import types
#Original class:
class A():
    """Original 'heavy' class: one instance attribute plus a class attribute."""

    y = 6

    def __init__(self, num):
        self.x = num

    def funcA(self):
        # print(x) with one argument works the same on Python 2 and 3,
        # unlike the original `print self.x` statement.
        print(self.x)
#class that wraps A using Bunch(thats what i needed .. u can use another):
# NOTE(review): Python 2 only -- relies on the `print` statement, `im_func`
# and the three-argument types.MethodType; on Python 3 the rebinding line
# would be types.MethodType(res, self) and `res` would be a plain function.
class B(Bunch):
def __init__(self, data, cls):
self._cls = cls # notice: not an instance, just the class itself
super(B, self).__init__(data)
def __getattr__(self, attr):
# Handles normal Bunch/dict attributes first
if attr in self.keys():
return self[attr]
else:
res = getattr(self._cls, attr)
if isinstance(res, types.MethodType):
# returns the class func with self overridden by this wrapper
return types.MethodType(res.im_func, self, type(self))
else:
# returns class attributes like y
return res
data = {'x': 3}
ins_b = B(data, A)
# funcA() prints 3 and returns None, so this line prints "3" then "None".
print ins_b.funcA() # returns 3
print ins_b.y # returns 6
And this solves my issue; it's a hack, and if you have the privileges, redesign the code.
I'm using functools.partial to create a closure, and using setattr to make it callable from a class instance. The idea here is to create a set of methods at runtime.
#!/usr/bin/python
from functools import partial
class MyClass(object):
    """Question's demo: attach partial-based 'methods' to the class at runtime."""

    def __init__(self, val):
        self.val = val

    @classmethod  # the post showed `#classmethod`: the `@` was mangled
    def generateMethods(cls):
        """Create test_dynamic_* attributes, each a partial with conf1 pre-bound."""
        def dummy(conf1, self):
            # %-formatting prints identically on Python 2 and 3, unlike the
            # original multi-argument `print` statements.
            print("conf1: %s" % conf1)
            print("self.val: %s" % self.val)
            print("")

        for s in ('dynamic_1', 'dynamic_2'):
            # partial pre-binds conf1; `self` remains the first free argument.
            closed = partial(dummy, s)
            setattr(cls, "test_{0}".format(s), closed)


if __name__ == '__main__':
    # Dynamically create some methods
    MyClass.generateMethods()
    # Create an instance
    x = MyClass('FOO')
    # The dynamically created methods aren't callable from the instance :(
    # (functools.partial is not a descriptor, so no `self` gets inserted)
    #x.test_dynamic_1()
    # TypeError: dummy() takes exactly 2 arguments (1 given)
    # .. but these work just fine
    MyClass.test_dynamic_1(x)
    MyClass.test_dynamic_2(x)
Is it possible to dynamically create methods which are closures, but callable from instances of the class?
I think the new functools.partialmethod is for this exact use case.
Straight from the docs:
>>> class Cell(object):
... def __init__(self):
... self._alive = False
... @property
... def alive(self):
... return self._alive
... def set_state(self, state):
... self._alive = bool(state)
... set_alive = partialmethod(set_state, True)
... set_dead = partialmethod(set_state, False)
...
>>> c = Cell()
>>> c.alive
False
>>> c.set_alive()
>>> c.alive
True
The issue is that when you're calling them using the instances they are actually not bound methods, i.e they have no knowledge about the instance. Bound methods insert the self to the arguments of the underlying function automatically when called, it is stored in the __self__ attribute of bound method.
So, override __getattribute__ and see if the object being fetched is an instance of partial type or not, if yes, convert it to a bound method using types.MethodType.
Code:
#!/usr/bin/python
from functools import partial
import types
class MyClass(object):
    """Answer's fix: __getattribute__ turns stored partials into bound methods."""

    def __init__(self, val):
        self.val = val

    @classmethod  # the post showed `#classmethod`: the `@` was mangled
    def generateMethods(cls):
        """Attach test_dynamic_* partials (conf1 pre-bound) to the class."""
        def dummy(conf1, self):
            # %-formatting keeps the output identical on Python 2 and 3.
            print("conf1: %s" % conf1)
            print("self.val: %s" % self.val)
            print("")

        for s in ('dynamic_1', 'dynamic_2'):
            closed = partial(dummy, s)
            setattr(cls, "test_{0}".format(s), closed)

    def __getattribute__(self, attr):
        # Here we do have access to the much-needed instance (self).
        obj = object.__getattribute__(self, attr)
        if isinstance(obj, partial):
            # partial objects are not descriptors, so bind them by hand.
            # Python 3's MethodType takes (func, instance); the original
            # three-argument form was Python 2 only.
            return types.MethodType(obj, self)
        else:
            return obj


if __name__ == '__main__':
    MyClass.generateMethods()
    x = MyClass('FOO')
    x.test_dynamic_1()
    x.test_dynamic_2()
I have a class that I want to share in a read-only fashion with children processes in a pool, so I prepared a proxy of a class but it didn't work. The following is a simplified example of my problem.
from multiprocessing.managers import BaseManager
class TestClass:
    """Toy class to be shared through a manager proxy."""

    def __init__(self, a):
        self.a = a

    def b(self):
        # Single-argument print() is the same on Python 2 and 3.
        print(self.a)


class MyManager(BaseManager): pass

MyManager.register('test', TestClass)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    t = TestClass(1)
    print(t.a)
    mt = manager.test(2)
    mt.b()
    # AutoProxy only exposes methods, so this attribute access raises
    # AttributeError -- which is exactly what the question is about.
    mt.a
When I run this code I get:
1
2
Traceback (most recent call last):
File "multiprocess_example_stackexchange.py", line 20, in <module>
mt.a
AttributeError: 'AutoProxy[test]' object has no attribute 'a'
It seems that I cannot access the attribute of a shared object directly via a proxy. Is the only way using a method that gets the attribute, or am I doing something wrong?
The Proxy objects used by multiprocessing.BaseManager and its sub-classes normally only expose methods from the objects they're referring to, not attributes. Now, there is multiprocessing.Manager().Namespace, which provides a Proxy sub-class that does provide access to attributes, rather than methods. We can create our own Proxy type which inherits from that, which enables access to all our attributes, as well as access to our b function:
from multiprocessing.managers import BaseManager, NamespaceProxy
class TestClass(object):
    """Toy class whose attribute *and* method we want through the proxy."""

    def __init__(self, a):
        self.a = a

    def b(self):
        # print() form works on Python 2 and 3, unlike `print self.a`.
        print(self.a)


class MyManager(BaseManager): pass


class TestProxy(NamespaceProxy):
    # We need to expose the same __dunder__ methods as NamespaceProxy,
    # in addition to the b method.
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__', 'b')

    def b(self):
        # Forward the call to the referent living in the manager process.
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('b')


MyManager.register('test', TestClass, TestProxy)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    t = TestClass(1)
    print(t.a)
    mt = manager.test(2)
    print(mt.a)
    mt.a = 5
    mt.b()
Output:
1
2
5
Edit:
If you want to be able to dynamically add methods from your original class to a Proxy class, you can do something like this:
from multiprocessing.managers import BaseManager, NamespaceProxy
import inspect
class TestClass(object):
    """First demo class: method b prints the stored attribute."""

    def __init__(self, a):
        self.a = a

    def b(self):
        print(self.a)  # print() form works on Python 2 and 3


class AnotherClass(object):
    """Second demo class: method c prints the stored attribute."""

    def __init__(self, a):
        self.a = a

    def c(self):
        print(self.a)


class MyManager(BaseManager): pass


class ProxyBase(NamespaceProxy):
    # Attribute get/set/delete are forwarded to the referent.
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')


class TestProxy(ProxyBase): pass
class AnotherProxy(ProxyBase): pass


def register_proxy(name, cls, proxy):
    """Expose every public method of `cls` on `proxy`, then register it.

    Two fixes over the posted version:
    * inspect.isfunction is checked too -- on Python 3, methods looked up on
      the class are plain functions, so ismethod alone matches nothing;
    * the method name is bound as a default argument (meth=attr); the posted
      lambda captured `attr` late, so every generated method would have
      called whatever name came last in dir(cls).
    """
    for attr in dir(cls):
        member = getattr(cls, attr)
        if ((inspect.isfunction(member) or inspect.ismethod(member))
                and not attr.startswith("__")):
            proxy._exposed_ += (attr,)
            setattr(proxy, attr,
                    lambda s, meth=attr: object.__getattribute__(s, '_callmethod')(meth))
    MyManager.register(name, cls, proxy)


register_proxy('test', TestClass, TestProxy)
register_proxy('another', AnotherClass, AnotherProxy)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    mt = manager.test(2)
    ma = manager.another(3)
    mt.b()
    ma.c()
    mt.a = 5
    ma.a = 6
    mt.b()
    ma.c()
After spending few hours to reading the source codes, here is the simplest ways to implement the proxy class to expose all attributes and methods:
# NOTE(review): quoted as posted; later posts on this page point out that
# wrapper() below neither returns the _callmethod result nor forwards
# kwargs.  `Test` is the user's class being registered.
class TestProxy(NamespaceProxy):
_exposed_ = tuple(dir(Test))
def __getattr__(self, name):
result = super().__getattr__(name)
if isinstance(result, types.MethodType):
def wrapper(*args, **kwargs):
self._callmethod(name, args)
return wrapper
return result
BaseManager.register('Test', Test, TestProxy)
manager = BaseManager()
test = manager.Test()
Also, here is an auto proxy method:
# NOTE(review): runtime factory that builds a NamespaceProxy subclass for
# `target` by exec-ing a forwarding __getattr__.  Same caveats as the class
# version above: the generated wrapper drops the return value and kwargs.
def Proxy(target):
dic = {'types': types}
exec('''def __getattr__(self, key):
result = self._callmethod('__getattribute__', (key,))
if isinstance(result, types.MethodType):
def wrapper(*args, **kwargs):
self._callmethod(key, args)
return wrapper
return result''', dic)
proxyName = target.__name__ + "Proxy"
ProxyType = type(proxyName, (NamespaceProxy,), dic)
ProxyType._exposed_ = tuple(dir(target))
return ProxyType
TestProxy = Proxy(Test)
BaseManager.register('Test', Test, TestProxy)
manager = BaseManager()
test = manager.Test()
Since I do not have enough reputation to comment, I am posting an answer. The otherwise excellent answer by @shtse8 has a bug. I wanted to point this out since this page is one of the top hits when you search for queries regarding NamespaceProxy, and the said answer has been used by others as well.
The problem lies in the below code:
# NOTE(review): deliberately reproduced buggy code from the answer being
# corrected -- the missing `return` (and missing kwargs) IS the bug under
# discussion here; do not "fix" this copy.
class TestProxy(NamespaceProxy):
_exposed_ = tuple(dir(Test))
def __getattr__(self, name):
result = super().__getattr__(name)
if isinstance(result, types.MethodType):
def wrapper(*args, **kwargs):
self._callmethod(name, args) # Result not returned
return wrapper
return result
If you use this class (or the equivalent 'auto proxy' method mentioned in the answer) to create proxy objects, then all functions which are programmed to return a value will always return a NoneType instead (if you access the functions from the proxy). Moreover, they will not pass on any keyword arguments. This is because the wrapper neither forwards kwargs to the method call nor returns the result of the call. Therefore, we need to pass kwargs and put a return in the line self._callmethod(name, args).
The TestProxy class should then become:
class TestProxy(NamespaceProxy):
    """Proxy exposing every attribute of Test; method calls are forwarded."""
    _exposed_ = tuple(dir(Test))

    def __getattr__(self, name):
        attr_value = super().__getattr__(name)
        if not isinstance(attr_value, types.MethodType):
            return attr_value

        def forward(*args, **kwargs):
            # Run the real method in the manager process and return its result.
            return self._callmethod(name, args, kwargs)
        return forward
The 'auto proxy' function would then become:
# Corrected auto-proxy factory: the exec-ed __getattr__ now returns the
# _callmethod result and forwards kwargs to the referent's method.
def Proxy(target):
dic = {'types': types}
exec('''def __getattr__(self, key):
result = self._callmethod('__getattribute__', (key,))
if isinstance(result, types.MethodType):
def wrapper(*args, **kwargs):
return self._callmethod(key, args, kwargs)
return wrapper
return result''', dic)
proxyName = target.__name__ + "Proxy"
ProxyType = type(proxyName, (NamespaceProxy,), dic)
ProxyType._exposed_ = tuple(dir(target))
return ProxyType
Update: Edited code and explanation to pass kwargs as well. Check Viktor's answer below
Here's a less verbose alternative that I found to work well in practice. Not sure if there are any disadvantages.
class TestClass:
    """Toy class whose attribute we expose via an added accessor method."""

    def __init__(self, a):
        self.a = a

    def b(self):
        # print() form works on Python 2 and 3, unlike `print self.a`.
        print(self.a)


def wrap_test_class(*args, **kwargs):
    """Factory registered with the manager instead of TestClass itself.

    Adds a get_a() accessor so the (method-only) AutoProxy can read `a`.
    """
    obj = TestClass(*args, **kwargs)
    obj.get_a = lambda: obj.a
    return obj


class MyManager(BaseManager): pass

MyManager.register('test', wrap_test_class)
This is an example of passing parameters (example: __getitem__) or not (example: __len__):
# Proxy forwarding two container dunders to the referent: __len__ shows a
# call with no parameters, __getitem__ shows how positional parameters are
# passed to _callmethod.
class TestProxy(NamespaceProxy):
_exposed_ = ('__getattribute__', '__setattr__', '__delattr__','__len__','__getitem__')
def __len__(self):
# object.__getattribute__ bypasses NamespaceProxy's own attribute
# forwarding to reach the raw _callmethod helper.
callmethod = object.__getattribute__(self, '_callmethod')
return callmethod('__len__')
def __getitem__(self,index):
callmethod = object.__getattribute__(self, '_callmethod')
return callmethod('__getitem__',(index,))
Charchit's solution was working for me, except I made a small completion/bugfix: kwargs could not be passed to the called methods. So here is the fixed version:
# Fixed proxy: method results are returned and both args and kwargs are
# forwarded to the referent through _callmethod.
class TestProxy(NamespaceProxy):
_exposed_ = tuple(dir(Test))
def __getattr__(self, name):
result = super().__getattr__(name)
if isinstance(result, types.MethodType):
def wrapper(*args, **kwargs):
return self._callmethod(name, args, kwargs) # args and kwargs!
return wrapper
return result
Didn't test the 'autoproxy' method but this fix should apply there too:
# Same fix applied to the exec-based factory: the generated wrapper returns
# the result and passes kwargs through to the referent.
def Proxy(target):
dic = {'types': types}
exec('''def __getattr__(self, key):
result = self._callmethod('__getattribute__', (key,))
if isinstance(result, types.MethodType):
def wrapper(*args, **kwargs):
return self._callmethod(key, args, kwargs)
return wrapper
return result''', dic)
proxyName = target.__name__ + "Proxy"
ProxyType = type(proxyName, (NamespaceProxy,), dic)
ProxyType._exposed_ = tuple(dir(target))
return ProxyType
I was originally using the answer by @shtse8 (as modified by @Charchit-Agarwal and @Viktor), which mostly worked for me, but it was trying to pickle the methods returned by the __getattr__ function, which didn't work in some use cases.
So here is another solution which is closer to the implementation of NamespaceProxy, but adds in the public methods the same way AutoProxy does.
def MakeProxyWithAttrs(target):
""" Create a derived NamespaceProxy class for `target`. """
# This bit follows what multiprocessing.managers.MakeProxy normally does.
dic = {}
public_methods = [m for m in dir(target) if m[0] != '_']
for meth in public_methods:
# One statically defined forwarding method per public name; %r re-quotes
# the method name inside the generated source, so nothing is late-bound.
exec('''def %s(self, *args, **kwds):
return self._callmethod(%r, args, kwds)
'''%(meth,meth), dic)
# NamespaceProxy starts with __getattribute__ defined, so subclass from that
# rather than BaseProxy, as MakeProxy normally does.
proxy_name = target.__name__ + "_Proxy"
ProxyType = type(proxy_name, (NamespaceProxy,), dic)
# Expose all the public methods and also __getattribute__ and __setattr__.
ProxyType._exposed_ = tuple(public_methods + ['__getattribute__', '__setattr__'])
return ProxyType
class Manager(multiprocessing.managers.BaseManager): pass
# NOTE(review): `test_func` comes from the poster's own project, and these
# lines run at import time -- there is no `if __name__ == "__main__"` guard.
test_proxy = MakeProxyWithAttrs(test_func)
Manager.register('test', test_func, test_proxy)
manager = Manager()
manager.start()
test = manager.test()
I'm trying to create a wrapper that blocks the execution of some methods. The classic solution is to use this pattern:
class RestrictingWrapper(object):
    """Wrap `w`, delegating every attribute except the names in `block`.

    Looking up a blocked name raises AttributeError, as if it were absent.
    """

    def __init__(self, w, block):
        self._w = w
        self._block = block

    def __getattr__(self, n):
        # Only called when normal lookup fails, i.e. for delegated names.
        if n in self._block:
            # `raise AttributeError, n` in the post is Python-2-only syntax;
            # the call form below works on both Python 2 and 3.
            raise AttributeError(n)
        return getattr(self._w, n)
The problem with this solution is the overhead that introduces in every call, so I am trying to use a MetaClass to accomplish the same task. Here is my solution:
class RestrictingMetaWrapper(type):
    """Metaclass that builds the new class from a wrapped class's namespace,
    minus the attribute names listed in `_block`."""

    def __new__(cls, name, bases, dic):
        target = dic['_w']
        namespace = {}
        namespace.update(target.__dict__)
        # Remove each blocked name from the copied namespace (a missing name
        # raises KeyError, exactly as the original `del` did).
        for blocked_name in dic.get('_block', []):
            del namespace[blocked_name]
        # The wrapper's own declarations win over the copied ones.
        namespace.update(dic)
        return type.__new__(cls, name, bases, namespace)
Works perfectly with simple classes:
class A(object):
    """Simple example target: one allowed and one to-be-blocked method."""

    def __init__(self, i):
        self.i = i

    def no_blocked(self):
        return 'OK: executed'

    def blocked(self):
        return 'BAD: executed'
# NOTE(review): Python 2 metaclass syntax -- __metaclass__ is ignored on
# Python 3, where this would be `class B(metaclass=RestrictingMetaWrapper)`.
class B(object):
__metaclass__ = RestrictingMetaWrapper
_w = A
_block = ['blocked']
b= B('something')
b.no_blocked # 'OK: executed' (note: attribute looked up, not called, as posted)
b.blocked # OK: AttributeError: 'B' object has no attribute 'blocked'
The problem comes with 'more complex' classes like ndarray from numpy:
# NOTE(review): __metaclass__ is Python 2 syntax (ignored on Python 3).
class NArray(object):
__metaclass__ = RestrictingMetaWrapper
_w = np.ndarray
_block = ['max']
na = NArray() # OK
na.max() # OK: AttributeError: 'NArray' object has no attribute 'max'
# The two failures below are the crux of the question: NArray does not
# inherit from np.ndarray, so ndarray's constructor and method descriptors
# do not apply to NArray instances.
na = NArray([3,3]) # TypeError: object.__new__() takes no parameters
na.min() # TypeError: descriptor 'min' for 'numpy.ndarray' objects doesn't apply to 'NArray' object
I assume that my metaclass is not well defined because other classes (ex: pandas.Series) suffer weird errors, like not blocking the indicated methods.
Could you find where the error is? Any other idea to solve this problem?
UPDATE:
The nneonneo's solution works great, but seems like wrapped classes can break the blocker with some black magic inside the class definition.
Using the nneonneo's solution:
import pandas

@restrict_methods('max')  # the post's `#restrict_methods` is a mangled `@`
class Row(pandas.Series):
    pass

r = Row([1,2,3])
r.max() # BAD: 3 -- AttributeError expected
As it says in the TypeError, min (and related functions) will only work on instances of np.ndarray; thus, the new subclass must inherit from the class you are trying to wrap.
Then, since you extend the base class, you have to replace the methods with a suitable descriptor:
class RestrictedMethod(object):
    """Descriptor that refuses every read of the attribute it replaces."""

    def __get__(self, obj, objtype):
        raise AttributeError("Access denied.")
# Metaclass that injects a RestrictedMethod descriptor for every name in
# `_block`; unlike the first attempt it keeps the declared base classes, so
# np.ndarray's constructor and methods still apply to instances.
class RestrictingMetaWrapper(type):
def __new__(cls, name, bases, dic):
block = dic.get('_block', [])
for attr in block:
dic[attr] = RestrictedMethod()
return type.__new__(cls, name, bases, dic) # note we inject the base class here
# NOTE(review): Python 2 style -- on Python 3 this would be written
# `class NArray(np.ndarray, metaclass=RestrictingMetaWrapper)`.
class NArray(np.ndarray):
__metaclass__ = RestrictingMetaWrapper
_block = ['max']
EDIT: Simplified the wrapper and made it transparently subclassable.
Note that this can all be done in a simpler way using a class decorator:
class RestrictedMethod(object):
    """Descriptor that blocks attribute access with AttributeError."""

    def __get__(self, obj, objtype):
        raise AttributeError("Access denied.")


def restrict_methods(*args):
    """Class decorator: replace each named attribute with a RestrictedMethod."""
    def wrap(cls):
        for attr in args:
            setattr(cls, attr, RestrictedMethod())
        return cls
    return wrap


@restrict_methods('max', 'abs')  # the post's `#restrict_methods` lost its `@`
class NArray(np.ndarray):
    pass