How to understand lazy function in Django utils functional module - python

I am learning from the Django source code.
When I read the functional module in Django, I couldn't understand it:
what is this function for, and how does its implementation work?
This is my first time using Stack Overflow, so if I've missed any rules here, please let me know. Thanks.
The code:
class Promise(object):
    """
    This is just a base class for the proxy class created in
    the closure of the lazy function. It can be used to recognize
    promises in code.
    """
    pass

def lazy(func, *resultclasses):
    """
    Turns any callable into a lazy evaluated callable. You need to give result
    classes or types -- at least one is needed so that the automatic forcing of
    the lazy evaluation code is triggered. Results are not memoized; the
    function is evaluated on every access.
    """

    @total_ordering
    class __proxy__(Promise):
        """
        Encapsulate a function call and act as a proxy for methods that are
        called on the result of that function. The function is not evaluated
        until one of the methods on the result is called.
        """
        __dispatch = None

        def __init__(self, args, kw):
            self.__args = args
            self.__kw = kw
            if self.__dispatch is None:
                self.__prepare_class__()

        def __reduce__(self):
            return (
                _lazy_proxy_unpickle,
                (func, self.__args, self.__kw) + resultclasses
            )

        @classmethod
        def __prepare_class__(cls):
            cls.__dispatch = {}
            for resultclass in resultclasses:
                cls.__dispatch[resultclass] = {}
                for type_ in reversed(resultclass.mro()):
                    for (k, v) in type_.__dict__.items():
                        # All __promise__ return the same wrapper method, but
                        # they also do setup, inserting the method into the
                        # dispatch dict.
                        meth = cls.__promise__(resultclass, k, v)
                        if hasattr(cls, k):
                            continue
                        setattr(cls, k, meth)
            cls._delegate_bytes = bytes in resultclasses
            cls._delegate_text = six.text_type in resultclasses
            assert not (cls._delegate_bytes and cls._delegate_text), \
                "Cannot call lazy() with both bytes and text return types."
            if cls._delegate_text:
                if six.PY3:
                    cls.__str__ = cls.__text_cast
                else:
                    cls.__unicode__ = cls.__text_cast
            elif cls._delegate_bytes:
                if six.PY3:
                    cls.__bytes__ = cls.__bytes_cast
                else:
                    cls.__str__ = cls.__bytes_cast

        @classmethod
        def __promise__(cls, klass, funcname, method):
            # Builds a wrapper around some magic method and registers that
            # magic method for the given type and method name.
            def __wrapper__(self, *args, **kw):
                # Automatically triggers the evaluation of a lazy value and
                # applies the given magic method of the result type.
                res = func(*self.__args, **self.__kw)
                for t in type(res).mro():
                    if t in self.__dispatch:
                        return self.__dispatch[t][funcname](res, *args, **kw)
                raise TypeError("Lazy object returned unexpected type.")

            if klass not in cls.__dispatch:
                cls.__dispatch[klass] = {}
            cls.__dispatch[klass][funcname] = method
            return __wrapper__

        def __text_cast(self):
            return func(*self.__args, **self.__kw)

        def __bytes_cast(self):
            return bytes(func(*self.__args, **self.__kw))

        def __cast(self):
            if self._delegate_bytes:
                return self.__bytes_cast()
            elif self._delegate_text:
                return self.__text_cast()
            else:
                return func(*self.__args, **self.__kw)

        def __ne__(self, other):
            if isinstance(other, Promise):
                other = other.__cast()
            return self.__cast() != other

        def __eq__(self, other):
            if isinstance(other, Promise):
                other = other.__cast()
            return self.__cast() == other

        def __lt__(self, other):
            if isinstance(other, Promise):
                other = other.__cast()
            return self.__cast() < other

        def __hash__(self):
            return hash(self.__cast())

        def __mod__(self, rhs):
            if self._delegate_bytes and six.PY2:
                return bytes(self) % rhs
            elif self._delegate_text:
                return six.text_type(self) % rhs
            return self.__cast() % rhs

        def __deepcopy__(self, memo):
            # Instances of this class are effectively immutable. It's just a
            # collection of functions. So we don't need to do anything
            # complicated for copying.
            memo[id(self)] = self
            return self

    @wraps(func)
    def __wrapper__(*args, **kw):
        # Creates the proxy object, instead of the actual value.
        return __proxy__(args, kw)

    return __wrapper__

This function takes a function and any number of classes. To simplify: it returns a wrapper (let's call it the "lazy function") instead of that function. At that point we can say that we have turned the function into a lazy function.
After that we can call this lazy function. Once called, it returns an instance of the proxy class instead of the result of the initial function; the initial function is not called yet.
The initial function will be called only after we invoke some method on that result (the proxy instance).
*resultclasses here are the classes whose instances are expected as results of the initial function.
For example:
def func(text):
    return text.title()

lazy_func = lazy(func, str)
# a lazy function, prepared to dispatch any method of a str instance

res = lazy_func('test')  # an instance of the __proxy__ class instead of the string 'Test'
res.find('T')  # only at this point do we call the initial function
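To see the laziness concretely, here is a small check you can run against Django itself (assuming Django is installed; Promise is the marker base class shown in the source above):
from django.utils.functional import Promise, lazy

def func(text):
    return text.title()

lazy_func = lazy(func, str)

res = lazy_func('test')
print(isinstance(res, Promise))  # True: we hold a proxy, not the string 'Test'
print(res.find('T'))             # 0: func runs now, then str.find on 'Test'
print(res.upper())               # 'TEST': func runs again (results are not memoized)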
I'll try to explain how it works overall:
def lazy(func, *resultclasses):  # on decoration
    @total_ordering
    class __proxy__(Promise):
        __dispatch = None

        def __init__(self, args, kw):  # on call
            # 3) The __proxy__ instance stores the original call's args and
            #    kwargs. args = ('test',) for our example.
            self.__args = args
            self.__kw = kw
            if self.__dispatch is None:
                self.__prepare_class__()
                # 4) If it's the first call of the lazy function, we have to
                #    prepare the __proxy__ class.

        # On the first call of the __wrapper__ function we have to prepare the
        # class. Class preparation here means filling the __dispatch class
        # attribute with references to all methods of each result class.
        # We need to prepare the class only on the first call.
        @classmethod
        def __prepare_class__(cls):
            cls.__dispatch = {}
            for resultclass in resultclasses:
                # 5) Loop through the resultclasses. In our example it's only str.
                cls.__dispatch[resultclass] = {}
                for type_ in reversed(resultclass.mro()):
                    # 6) Loop through each superclass of each resultclass in
                    #    reversed order, i.e. (object, str) for our example.
                    for (k, v) in type_.__dict__.items():
                        # 7) Loop through each attribute of each superclass.
                        #    For example k = 'find', v = str.find.
                        meth = cls.__promise__(resultclass, k, v)
                        if hasattr(cls, k):
                            continue
                        setattr(cls, k, meth)
                        # 9) If the __proxy__ class doesn't have the attribute
                        #    'find', for example, we set __wrapper__ as that
                        #    attribute. So the __proxy__ class will have the
                        #    __wrapper__ method in __proxy__.__dict__['find'],
                        #    and so on for all methods.

        @classmethod
        def __promise__(cls, klass, funcname, method):
            # Builds a wrapper around some magic method and registers that
            # magic method for the given type and method name.
            def __wrapper__(self, *args, **kw):  # on each call of a method of the result class (str)
                # Automatically triggers the evaluation of a lazy value and
                # applies the given magic method of the result type.
                res = func(*self.__args, **self.__kw)
                # 10) Finally we call the original function.
                for t in type(res).mro():
                    # 11) Loop through all the superclasses of the result's
                    #     class from the bottom to the top, i.e. (str, object)
                    #     for our example.
                    if t in self.__dispatch:
                        # 12) If the class is dispatched, we pass the result
                        #     with args and kwargs to
                        #     __proxy__.__dispatch[str]['find'], which is the
                        #     unbound method 'find' of the str class.
                        #     For our example res = 'Test', args = ('T',).
                        return self.__dispatch[t][funcname](res, *args, **kw)
                raise TypeError("Lazy object returned unexpected type.")

            if klass not in cls.__dispatch:
                cls.__dispatch[klass] = {}
            cls.__dispatch[klass][funcname] = method
            # 7) Adds, for example, __proxy__.__dispatch[str]['find'] = str.find,
            #    the unbound method 'find' of the str class, and so on for each
            #    method of each superclass of each resultclass.
            # 8) Returns a new __wrapper__ method for each method of each
            #    resultclass. This wrapper method holds the funcname variable
            #    in its closure.
            return __wrapper__

    @wraps(func)  # makes the lazy function look like the initial one
    def __wrapper__(*args, **kw):
        # Creates the proxy object instead of the actual value.
        return __proxy__(args, kw)
        # 2) On a call of the lazy function we get a __proxy__ instance
        #    instead of the actual value.

    return __wrapper__
    # 1) As the result of the lazy(func, *resultclasses) call we get the
    #    __wrapper__ function, which looks like the initial function because
    #    of the @wraps decorator.
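If the class-preparation machinery is still hard to follow, here is a stripped-down sketch of the same idea that I find easier to read: one result class, no dispatch table, no bytes/text handling. All names here are mine, not Django's:
def simple_lazy(func, resultclass):
    """A stripped-down lazy(): one result class, no dispatch table."""
    class Proxy(object):
        def __init__(self, args, kw):
            self._args, self._kw = args, kw

    for name, method in vars(resultclass).items():
        if hasattr(Proxy, name):      # same guard Django uses: don't clobber
            continue                  # anything already present (incl. dunders)
        def make_wrapper(method):
            def wrapper(self, *args, **kw):
                res = func(*self._args, **self._kw)  # evaluate here, every time
                return method(res, *args, **kw)      # then delegate to str.find etc.
            return wrapper
        setattr(Proxy, name, make_wrapper(method))

    def lazy_func(*args, **kw):
        return Proxy(args, kw)        # no evaluation yet, just capture the args
    return lazy_func

lazy_title = simple_lazy(lambda s: s.title(), str)
print(lazy_title('test').find('T'))   # 0 -- the lambda runs only now
The essential trick is the same: the proxy only stores the call's arguments, and every copied method evaluates func first and then delegates to the real method of the result.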

Related

How can I return self and another variable in a python class method while method chaining?

I understand what I am asking here is probably not the best code design, but the reason for me asking is strictly academic. I am trying to understand how to make this concept work.
Typically, I will return self from a class method so that the following methods can be chained together. My understanding is by returning self, I am simply returning an instance of the class, for the following methods to work on.
But in this case, I am trying to figure out how to return both self and another value from the method. The idea is if I do not want to chain, or I do not call any class attributes, I want to retrieve the data from the method being called.
Consider this example:
class Test(object):
def __init__(self):
self.hold = None
def methoda(self):
self.hold = 'lol'
return self, 'lol'
def newmethod(self):
self.hold = self.hold * 2
return self, 2
t = Test()
t.methoda().newmethod()
print(t.hold)
In this case, I will get an AttributeError: 'tuple' object has no attribute 'newmethod', which is to be expected because the methoda method returns a tuple, and a tuple has no method or attribute called newmethod.
My question is not about unpacking multiple returns, but more about how can I continue to chain methods when the preceding methods are returning multiple values. I also understand that I can control the methods return with an argument to it, but that is not what I am trying to do.
As mentioned previously, I do realize this is probably a bad question, and I am happy to delete the post if the question doesn't make any sense.
Following the suggestion by @JohnColeman, you can return a special tuple with attribute lookup delegated to your object if it is not a normal tuple attribute. That way it acts like a normal tuple except when you are chaining methods.
You can implement this as follows:
class ChainResult(tuple):
    def __new__(cls, *args):
        return super(ChainResult, cls).__new__(cls, args)

    def __getattribute__(self, name):
        try:
            return getattr(super(), name)
        except AttributeError:
            return getattr(super().__getitem__(0), name)

class Test(object):
    def __init__(self):
        self.hold = None

    def methoda(self):
        self.hold = 'lol'
        return ChainResult(self, 'lol')

    def newmethod(self):
        self.hold = self.hold * 2
        return ChainResult(self, 2)
Testing:
>>> t = Test()
>>> t.methoda().newmethod()
>>> print(t.hold)
lollol
The returned result does indeed act as a tuple:
>>> t, res = t.methoda().newmethod()
>>> print(res)
2
>>> print(isinstance(t.methoda().newmethod(), tuple))
True
You could imagine all sorts of semantics with this, such as forwarding the returned values to the next method in the chain using closure:
class ChainResult(tuple):
    def __new__(cls, *args):
        return super(ChainResult, cls).__new__(cls, args)

    def __getattribute__(self, name):
        try:
            return getattr(super(), name)
        except AttributeError:
            attr = getattr(super().__getitem__(0), name)
            if callable(attr):
                chain_results = super().__getitem__(slice(1, None))
                return lambda *args, **kw: attr(*(chain_results + args), **kw)
            else:
                return attr
For example,
class Test:
    ...
    def methodb(self, *args):
        print(*args)
would produce
>>> t = Test()
>>> t.methoda().methodb('catz')
lol catz
It would be nice if you could make ChainResult invisible. You can almost do it by initializing the tuple base class with the normal results and saving your object in a separate attribute used only for chaining. Then use a class decorator that wraps every method with ChainResult(self, self.method(*args, **kw)). It will work okay for methods that return a tuple, but a single value return will act like a length-1 tuple, so you will need something like obj.method()[0] or result, = obj.method() to work with it. I played a bit with delegating to tuple for a multiple return, or to the value itself for a single return; maybe it could be made to work, but it introduces so many ambiguities that I doubt it could work well.
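For what it's worth, here is a rough sketch of that class-decorator idea, reusing the first ChainResult class from above; chainable is my name for it, and only public methods get wrapped:
import functools

def chainable(cls):
    """Class decorator: make every public method return ChainResult(self, result)."""
    def wrap(method):
        @functools.wraps(method)
        def wrapper(self, *args, **kw):
            return ChainResult(self, method(self, *args, **kw))
        return wrapper

    for name, attr in list(vars(cls).items()):
        if callable(attr) and not name.startswith('_'):
            setattr(cls, name, wrap(attr))
    return cls

@chainable
class Test:
    def __init__(self):
        self.hold = None

    def methoda(self):
        self.hold = 'lol'
        return 'lol'

    def newmethod(self):
        self.hold = self.hold * 2
        return 2

t = Test()
t.methoda().newmethod()  # chains through the ChainResult wrappers
print(t.hold)            # lollol
The user-written methods return plain values; the decorator supplies the tuple wrapping, which is the "invisible" part of the idea.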

Accessing self in a function attribute

I'm trying to add a decorator that adds callable attributes to functions that return slightly different objects than the return value of the function, but will execute the function at some point.
The problem I'm running into is that when the function object is passed into the decorator, it is unbound and doesn't contain the implicit self argument. When I call the created attribute function (ie. string()), I don't have access to self and can't pass it into the original function.
def deco(func):
    """
    Add an attribute to the function that takes the same arguments as the
    function but modifies the output.
    """
    def string(*args, **kwargs):
        return str(func(*args, **kwargs))
    func.string = string
    return func

class Test(object):
    def __init__(self, value):
        self._value = 1

    @deco
    def plus(self, n):
        return self._value + n
When I go to execute the attribute created by the decorator, this is the error I get, because args doesn't contain the self reference.
>>> t = Test(100)
>>> t.plus(1) # Gets passed self implicitly
101
>>> t.plus.string(1) # Does not get passed self implicitly
...
TypeError: plus() takes exactly 2 arguments (1 given)
Is there a way to create a decorator like this that can get a reference to self? Or is there a way to bind the added attribute function (string()) so that it also gets called with the implicit self argument?
You can use descriptors here:
class deco(object):
    def __init__(self, func):
        self.func = func
        self.parent_obj = None

    def __get__(self, obj, type=None):
        self.parent_obj = obj
        return self

    def __call__(self, *args, **kwargs):
        return self.func(self.parent_obj, *args, **kwargs)

    def string(self, *args, **kwargs):
        return str(self(*args, **kwargs))

class Test(object):
    def __init__(self, value):
        self._value = value

    @deco
    def plus(self, n):
        return self._value + n
so that:
>>> test = Test(3)
>>> test.plus(1)
4
>>> test.plus.string(1)
'4'
This warrants an explanation. deco is a decorator, but it is also a descriptor. A descriptor is an object that defines alternative behavior that is to be invoked when the object is looked up as an attribute of its parent. Interestingly, bound methods are themselves implemented using the descriptor protocol.
That's a mouthful. Let's look at what happens when we run the example code. First, when we define the plus method, we apply the deco decorator. Now normally we see functions as decorators, and the return value of the function is the decorated result. Here we are using a class as a decorator. As a result, Test.plus isn't a function, but rather an instance of the deco type. This instance contains a reference to the plus function that we wish to wrap.
The deco class has a __call__ method that allows instances of it to act like functions. This implementation simply passes the arguments given to the plus function it has a reference to. Note that the first argument will be the reference to the Test instance.
The tricky part comes in implementing test.plus.string(1). To do this, we need a reference to the test instance of which the plus instance is an attribute. To accomplish this, we use the descriptor protocol. That is, we define a __get__ method which will be invoked whenever the deco instance is accessed as an attribute of some parent class instance. When this happens, it stores the parent object inside itself. Then we can simply implement plus.string as a method on the deco class, and use the reference to the parent object stored within the deco instance to get at the test instance to which plus belongs.
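To make that last point concrete, here is plain Python showing that an ordinary method lookup already goes through __get__ (standard behavior, nothing specific to this answer):
class Plain(object):
    def plus(self, n):
        return n + 1

p = Plain()
f = Plain.__dict__['plus']      # the raw function object
bound = f.__get__(p, Plain)     # what p.plus does behind the scenes
print(bound(1))                 # 2
print(bound == p.plus)          # True: the very same bound method
One caveat with the deco class above: parent_obj lives on a single deco instance shared by the whole class, so interleaving calls on two different Test objects can overwrite each other's stored reference.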
This is a lot of magic, so here's a disclaimer: Though this looks cool, it's probably not a great idea to implement something like this.
You need to decorate your function at instantiation time (before creating the instance method). You can do this by overriding the __new__ method:
class Test(object):
    def __new__(cls, *args_, **kwargs_):
        def deco(func):
            def string(*args, **kwargs):
                return "my_str is :" + str(func(*args, **kwargs))
            # *1
            func.__func__.string = string
            return func
        obj = object.__new__(cls)  # object.__new__ takes no extra arguments
        setattr(obj, 'plus', deco(getattr(obj, 'plus')))
        return obj

    def __init__(self, value):
        self._value = 1

    def plus(self, n):
        return self._value + n
Demo:
>>> t = Test(100)
>>> t.plus(1)
2
>>> t.plus.string(5)
'my_str is :6'
1. Since Python doesn't let you set an attribute on the bound method itself, you can use its __func__ attribute in order to access the real function object underlying the instance method.

Accessing self from outside of a class

I'm attempting to implement a decorator on certain methods in a class so that if the value has NOT been calculated yet, the method will calculate the value, otherwise it will just return the precomputed value, which is stored in an instance defaultdict. I can't seem to figure out how to access the instance defaultdict from inside of a decorator declared outside of the class. Any ideas on how to implement this?
Here are the imports (for a working example):
from collections import defaultdict
from math import sqrt
Here is my decorator:
class CalcOrPass:
    def __init__(self, func):
        self.f = func

    # if the value is already in the instance dict from SimpleData,
    # don't recalculate the values, instead return the value from the dict
    def __call__(self, *args, **kwargs):
        # can't figure out how to access/pass dict_from_SimpleData to here :(
        res = dict_from_SimpleData[self.f.__name__]
        if not res:
            res = self.f(*args, **kwargs)
            dict_from_SimpleData[self.f.__name__] = res
        return res
And here's the SimpleData class with decorated methods:
class SimpleData:
    def __init__(self, data):
        self.data = data
        self.stats = defaultdict()  # here's the dict I'm trying to access

    @CalcOrPass
    def mean(self):
        return sum(self.data) / float(len(self.data))

    @CalcOrPass
    def se(self):
        return [i - self.mean() for i in self.data]

    @CalcOrPass
    def variance(self):
        return sum(i**2 for i in self.se()) / float(len(self.data) - 1)

    @CalcOrPass
    def stdev(self):
        return sqrt(self.variance())
So far, I've tried declaring the decorator inside of SimpleData, trying to pass multiple arguments with the decorator (apparently you can't do this), and spinning around in my swivel chair while trying to toss paper airplanes into my scorpion tank. Any help would be appreciated!
The way you define your decorator, the target object information is lost. Use a function wrapper instead:
from functools import wraps

def CalcOrPass(func):
    @wraps(func)
    def result(self, *args, **kwargs):
        res = self.stats[func.__name__]
        if not res:
            res = func(self, *args, **kwargs)
            self.stats[func.__name__] = res
        return res
    return result
wraps is from functools and not strictly necessary here, but very convenient.
Side note: defaultdict takes a factory function argument:
defaultdict(lambda: None)
But since you're testing for the existence of the key anyway, you should prefer a simple dict.
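With a plain dict, the wrapper might look like this (my variant of the answer above, testing key membership instead of relying on a default value):
from functools import wraps

def CalcOrPass(func):
    @wraps(func)
    def result(self, *args, **kwargs):
        if func.__name__ not in self.stats:          # compute only once
            self.stats[func.__name__] = func(self, *args, **kwargs)
        return self.stats[func.__name__]
    return result

class SimpleData:
    def __init__(self, data):
        self.data = data
        self.stats = {}                              # a plain dict is enough

    @CalcOrPass
    def mean(self):
        return sum(self.data) / float(len(self.data))

d = SimpleData([1, 2, 3])
print(d.mean())   # computed: 2.0
print(d.mean())   # returned from d.stats, not recomputed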
You can't do what you want when your function is defined, because it is unbound. Here's a way to achieve it in a generic fashion at runtime:
class CalcOrPass(object):
    def __init__(self, func):
        self.f = func

    def __get__(self, obj, type=None):  # Cheat.
        return self.__class__(self.f.__get__(obj, type))

    # if the value is already in the instance dict from SimpleData,
    # don't recalculate the values, instead return the value from the dict
    def __call__(self, *args, **kwargs):
        # I'll concede that this doesn't look very pretty.
        # TODO handle KeyError here
        res = self.f.__self__.stats[self.f.__name__]
        if not res:
            res = self.f(*args, **kwargs)
            self.f.__self__.stats[self.f.__name__] = res
        return res
A short explanation:
Our decorator defines __get__ (and is hence said to be a descriptor). Whereas the default behaviour for an attribute access is to get it from the object's dictionary, if the descriptor method is defined, Python will call that instead.
With objects, object.__getattribute__ transforms an access like b.x into type(b).__dict__['x'].__get__(b, type(b)).
This way we can access the instance being bound and its type from the descriptor's parameters.
Then we create a new CalcOrPass object which now decorates (wraps) a bound method instead of the old unbound function.
Note the new style class definition. I'm not sure if this will work with old-style classes, as I haven't tried it; just don't use those. :) This will work for both functions and methods, however.
What happens to the "old" decorated functions is left as an exercise.

Implementing __neg__ in a generic way for all subclasses in Python

I apologize in advance for the rather long question.
I'm implementing callable objects and would like them to behave somewhat like (mathematical) functions. I have a base class whose __call__ method raises NotImplementedError so users must subclass to define __call__. My question is: how can I define the special method __neg__ in the base class so subclasses immediately have the expected behavior without having to implement __neg__ in each subclass? My sense of the expected behavior is that if f is an instance of (a subclass of) the base class with a properly defined __call__, then -f should be an instance of the same class as f, possessing all the same attributes as f, except for __call__, which should return the negative of f's __call__.
Here's an example of what I mean:
class Base(object):
    def __call__(self, *args, **kwargs):
        raise NotImplementedError('Please subclass')

    def __neg__(self):
        def call(*args, **kwargs):
            return -self(*args, **kwargs)
        mBase = type('mBase', (Base,), {'__call__': call})
        return mBase()

class One(Base):
    def __init__(self, data=None):  # default added so One() below works
        self.data = data

    def __call__(self, *args, **kwargs):
        return 1
This has the expected behavior:
one = One()
print one() # Prints 1
minus_one = -one
print minus_one() # Prints -1
though it's not exactly what I'd like since minus_one is not an instance of the same class as one (but I could live with that).
Now I'd like the new instance minus_one to inherit all attributes and methods of one; only the __call__ method should change. So I could change __neg__ to
import inspect

def __neg__(self):
    def call(*args, **kwargs):
        return -self(*args, **kwargs)
    mBase = type('mBase', (Base,), {'__call__': call})
    new = mBase()
    for n, v in inspect.getmembers(self):
        if n != '__call__':
            setattr(new, n, v)
    return new
This seems to work. My question is: are there cons to this strategy? Implementing a generic __neg__ must be a standard exercise but I couldn't find anything on it on the web. Are there recommended alternatives?
Thanks in advance for any comments.
Your approach has several downsides. One example is that you copy all members of the original instance to the new instance -- this won't work if your class overrides any special methods other than __call__, since special methods are only looked up in the dictionary of the object's type when called implicitly. Moreover, it copies a lot of stuff that is actually inherited from object and doesn't need to go in the instance's __dict__.
An easier approach that satisfies your exact requirements is to make the new type a subclass of the instance's original type. This can be done by defining a local class inside the __neg__() method:
def __neg__(self):
    class Neg(self.__class__):
        def __call__(self_, *args, **kwargs):
            return -self(*args, **kwargs)
    neg = Base.__new__(Neg)
    neg.__dict__ = self.__dict__.copy()
    return neg
This defines a new class Neg derived from the original function's type and overwrites its __call__() method. It creates an instance of this class using Base's constructor; this is to cover the case that self's class would take constructor arguments. Finally, we copy everything that is directly stored in the instance self to the new instance.
If I were to design the system, I'd take a completely different approach. I'd fix the interface for a function and would only rely on this fixed interface for every function. I wouldn't bother to copy all attributes of an instance to the negated function, but rather do this:
class Function(object):
    def __neg__(self):
        return NegatedFunction(self)

    def __add__(self, other):
        return SumFunction(self, other)

class NegatedFunction(Function):
    def __init__(self, f):
        self.f = f

    def __call__(self, *args, **kwargs):
        return -self.f(*args, **kwargs)

class SumFunction(Function):
    def __init__(self, *funcs):
        self.funcs = funcs

    def __call__(self, *args, **kwargs):
        return sum(f(*args, **kwargs) for f in self.funcs)
This approach does not fulfil your requirement that the function returned by __neg__() has all the attributes and methods of the original function, but I think this requirement is rather questionable as far as design is concerned. I think dropping this requirement will give you a much cleaner and more general approach (as demonstrated by including an __add__() operator in the example above).
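For example, with a hypothetical Const subclass on top of the classes above (Const is my name, just for the demo):
class Const(Function):
    def __init__(self, value):
        self.value = value

    def __call__(self, *args, **kwargs):
        return self.value

f = Const(1)
g = Const(2)
print((-f)())        # -1
print((f + g)())     # 3
print((-(f + g))())  # -3: negation and sum compose freely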
The basic problem you're running into is that __xxx__ methods are only looked up on the class, which means all instances of the same class will use the same __xxx__ methods. This suggests using a method similar to what Cat Plus Plus suggested; however, you also don't want your users to have to worry about even more special names (such as _call_impl and _negate).
If you don't mind the possibly mind-melting power of metaclasses, that is the route to take. A metaclass can add in the _negate attribute automatically (and name mangle it to avoid clashes), as well as take the __call__ that your user wrote and rename it to _call, then create a new __call__ that calls the old __call__ (now called _call ;) and then negates the result, if necessary, before returning it.
Here's the code:
import copy
import inspect

class MetaFunction(type):
    def __new__(metacls, cls_name, cls_bases, cls_dict):
        result_class = type.__new__(metacls, cls_name, cls_bases, cls_dict)
        if '__call__' in cls_dict:
            original_call = cls_dict['__call__']
            args, varargs, kwargs, defaults = inspect.getargspec(original_call)
            args = args[1:]
            if defaults is None:
                defaults = [''] * len(args)
            else:
                defaults = [''] * (len(args) - len(defaults)) + list(defaults)
            signature = []
            for arg, default in zip(args, defaults):
                if default:
                    signature.append('%s=%s' % (arg, default))
                else:
                    signature.append(arg)
            if varargs is not None:
                signature.append(varargs)
            if kwargs is not None:
                signature.append(kwargs)
            signature = ', '.join(signature)
            passed_args = ', '.join(args)
            new_call = (
                """def __call__(self, %(signature)s):
                    result = self._call(%(passed_args)s)
                    if self._%(cls_name)s__negate:
                        result = -result
                    return result"""
                % {
                    'cls_name': cls_name,
                    'signature': signature,
                    'passed_args': passed_args,
                })
            eval_dict = {}
            exec new_call in eval_dict
            new_call = eval_dict['__call__']
            new_call.__doc__ = original_call.__doc__
            new_call.__module__ = original_call.__module__
            new_call.__dict__ = original_call.__dict__
            setattr(result_class, '__call__', new_call)
            setattr(result_class, '_call', original_call)
            setattr(result_class, '_%s__negate' % cls_name, False)
            negate = """def __neg__(self):
                "returns an instance of the same class that returns the negation of __call__"
                negated = copy.copy(self)
                negated._%(cls_name)s__negate = not self._%(cls_name)s__negate
                return negated""" % {'cls_name': cls_name}
            eval_dict = {'copy': copy}
            exec negate in eval_dict
            negate = eval_dict['__neg__']
            negate.__module__ = new_call.__module__
            setattr(result_class, '__neg__', eval_dict['__neg__'])
        return result_class
class Base(object):
    __metaclass__ = MetaFunction

class Power(Base):
    def __init__(self, power):
        "power = the power to raise to"
        self.power = power

    def __call__(self, number):
        "raises number to power"
        return number ** self.power
and an example:
--> square = Power(2)
--> neg_square = -square
--> square(9)
81
--> neg_square(9)
-81
While the metaclass code itself can be complex, the resulting objects can be very easy to use. To be fair, most of the code, and the complexity, in MetaFunction is due to re-writing __call__ in order to preserve the call signature and make introspection useful... so instead of seeing __call__(*args, **kwargs) in help, you see this:
Help on Power in module test object:
class Power(Base)
| Method resolution order:
| Power
| Base
| __builtin__.object
|
| Methods defined here:
|
| __call__(self, number)
| raises number to power
|
| __init__(self, power)
| power = the power to raise to
|
| __neg__(self)
| returns an instance of the same class that returns the negation of __call__
Instead of creating a new type, you can keep a flag on the instance that says whether the call result should be negated or not. And then you can offload the actual overridable call behaviour to a separate (non-special) method, as part of your own protocol.
import copy

class Base(object):
    def __init__(self):
        self._negate_call = False

    def call_impl(self, *args, **kwargs):
        raise NotImplementedError

    def __call__(self, *args, **kwargs):
        result = self.call_impl(*args, **kwargs)
        return -result if self._negate_call else result

    def __neg__(self):
        other = copy.copy(self)
        other._negate_call = not other._negate_call
        return other
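A quick sketch of how a subclass would use this (my example, not part of the original answer):
class One(Base):
    def call_impl(self, *args, **kwargs):
        return 1

one = One()
minus_one = -one
print(one())                   # 1
print(minus_one())             # -1
print(type(minus_one) is One)  # True: same class, only the flag differs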

Python __getattribute__ (or __getattr__) to emulate php __call

I would like to create a class that effectively does this (mixing a little PHP with Python)
class Middle(object):
    # self.apply is a function that applies a function to a list
    # e.g. self.apply = [] ... self.apply.append(foobar)
    def __call(self, name, *args):
        self.apply(name, *args)
Thus allowing for code to say:
m = Middle()
m.process_foo(a, b, c)
In this case __call() is the PHP __call() method which is invoked when a method is not found on an object.
You need to define __getattr__; it is called if an attribute is not otherwise found on your object.
Notice that __getattr__ is called for any failed lookup, and that you don't get it like a function call, so you have to return the method that will be called.
def __getattr__(self, attr):
    def default_method(*args):
        self.apply(attr, *args)
    return default_method
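Putting it together with the question's Middle class might look like this (a sketch; here apply just prints what it would dispatch, since the question leaves its implementation open):
class Middle(object):
    def apply(self, name, *args):
        print('dispatching %s with %r' % (name, args))

    def __getattr__(self, attr):
        # called only when normal lookup fails, so real
        # attributes and methods are unaffected
        def default_method(*args):
            self.apply(attr, *args)
        return default_method

m = Middle()
m.process_foo(1, 2, 3)  # dispatching process_foo with (1, 2, 3)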
Consider passing arguments to your methods as arguments, not encoded into the method name which will then be magically used as an argument.
Where are you writing code that doesn't know what methods it will be calling?
Why call c.do_Something(x) and then unpack the method name instead of just calling c.do('Something', x) ?
In any case it's easy enough to handle unfound attributes:
class Dispatcher(object):
    def __getattr__(self, key):
        try:
            return object.__getattribute__(self, key)  # normal lookup already failed, so this re-raises
        except AttributeError:
            return self.dispatch(key)

    def default(self, *args, **kw):
        print "Assuming default method"
        print args, kw

    def dispatch(self, key):
        print 'Looking for method: %s' % (key,)
        return self.default
A test:
>>> d = Dispatcher()
>>> d.hello()
Looking for method: hello
Assuming default method
() {}
This seems to be fraught with "gotchas": the thing returned by __getattr__ will be presumed to be not just a function, but a bound method on that instance. So be sure to return something that behaves like one.
I actually did this recently. Here's an example of how I solved it:
class Example:
    def FUNC_1(self, arg):
        return arg - 1

    def FUNC_2(self, arg):
        return arg - 2

    def decode(self, func, arg):
        try:
            exec("result = self.FUNC_%s(arg)" % func)
        except AttributeError:
            # Call your default method here
            result = self.default(arg)
        return result

    def default(self, arg):
        return arg
and the output:
>>> dude = Example()
>>> print dude.decode(1, 0)
-1
>>> print dude.decode(2, 10)
8
>>> print dude.decode(3, 5)
5
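The same dispatch works without exec by using getattr with a default, which also avoids building code from strings (my variant of the class above):
class Example:
    def FUNC_1(self, arg):
        return arg - 1

    def FUNC_2(self, arg):
        return arg - 2

    def default(self, arg):
        return arg

    def decode(self, func, arg):
        # getattr falls back to self.default when FUNC_<n> doesn't exist
        method = getattr(self, 'FUNC_%s' % func, self.default)
        return method(arg)

dude = Example()
print(dude.decode(1, 0))   # -1
print(dude.decode(2, 10))  # 8
print(dude.decode(3, 5))   # 5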
