I want to be able to write a bunch of tests in a format similar to this:
class TestPytest:
    """Desired usage: decorators feed `given`/`expect` values into each test.

    NOTE: the source had the decorators mangled to `#given(3)` etc. -- the
    `@` was lost in formatting; restored here.
    """

    @given(3)
    @expect(3)
    def test_passes(self, g, e):
        assert g == e

    @given(3)
    @expect(4)
    def test_fails(self, g, e):
        assert g == e

    def test_boring(self): # for comparison
        pass
(I'm not convinced this is a good idea, but I'll be taking it in further directions, so it's not as strange as it looks.)
To that end, I've attempted to write these decorators:
import functools
from functools import wraps
class WrappedTest(object):
    """Descriptor that wraps a test function and injects given/expect lists.

    `__get__` makes instances behave like methods: attribute access on an
    instance yields a closure that prepends (instance, given, expects) to
    the call arguments.
    """

    def __init__(self, f):
        self.func = f
        self.given = []     # values accumulated by the @given decorator
        self.expects = []   # values accumulated by the @expect decorator

    def __get__(self, instance, owner):
        # Restored decorator (was mangled to `#functools.wraps(...)`):
        # without it the returned closure loses the test's __name__.
        @functools.wraps(self.func)
        def call(*a, **kw):
            return self.func(instance, self.given, self.expects,
                             *a, **kw)
        return call
def given(*objects):
    """Decorator: append *objects* to the test's `given` list, wrapping the
    raw function in a WrappedTest on first application."""
    def wrapped(test):
        target = (test if isinstance(test, WrappedTest)
                  else functools.update_wrapper(WrappedTest(test), test))
        target.given.extend(objects)
        return target
    return wrapped
def expect(*objects):
    """Decorator: append *objects* to the test's `expects` list, wrapping the
    raw function in a WrappedTest on first application."""
    def wrapped(test):
        target = (test if isinstance(test, WrappedTest)
                  else functools.update_wrapper(WrappedTest(test), test))
        target.expects.extend(objects)
        return target
    return wrapped
But when I try to run this test, pytest doesn't find test_passes or test_fails. It does find test_boring.
My working hypothesis is that I haven't properly wrapped the test methods. They show up as functions rather than methods:
>>> test_pytest.TestPytest().test_fails
<function test_pytest.test_fails>
>>> test_pytest.TestPytest().test_boring
<bound method TestPytest.test_boring of <test_pytest.TestPytest instance at 0x101f3dab8>>
But I'm not sure how to fix this. I've tried changing functools.wraps(self.func) to functools.wraps(self.func.__get__(instance, owner)), under the theory that it would then wrap with a bound method rather than a function. But that was kind of a guess, and it didn't work.
I know pytest is capable of finding decorated functions written properly, so presumably I'm doing something wrong, but I'm not sure what.
It looks like I was wrong about wrapping. Looking through the pytest source, it treats nested classes differently from methods. It accesses members through __dict__, which ignores __get__, so WrappedTest wasn't successfully pretending to be a method.
I've replaced the WrappedTest instance with a function, and it seems to be working fine (even without the #functools.wraps line):
import functools
from collections import namedtuple
def wrap_test_method(meth):
    """Wrap *meth* so calls receive (self, given, expects, ...).

    Idempotent: a method that already carries `_storage` is returned as-is,
    so stacking @given and @expect shares one Storage.
    """
    if hasattr(meth, '_storage'):
        return meth
    Storage = namedtuple('Storage', ['given', 'expects'])
    sto = Storage(given=[], expects=[])

    # Restored decorator (was mangled to `#functools.wraps(meth)`): keeps
    # the wrapper's __name__ so pytest still collects it as test_*.
    @functools.wraps(meth)
    def new_meth(self, *a, **kw):
        return meth(self, sto.given, sto.expects, *a, **kw)
    new_meth._storage = sto
    return new_meth
def given(*objects):
    """Decorator: record *objects* as the test's `given` values."""
    def decorator(test_method):
        wrapper = wrap_test_method(test_method)
        wrapper._storage.given.extend(objects)
        return wrapper
    return decorator
def expect(*objects):
    """Decorator: record *objects* as the test's `expects` values."""
    def decorator(test_method):
        wrapper = wrap_test_method(test_method)
        wrapper._storage.expects.extend(objects)
        return wrapper
    return decorator
Related
I'm trying to learn some advanced decorator usage. Specifically, I'm trying to monkey patch a class's method via a decorator within a function.
This is a basic example to illustrate what I'm trying to do. I have a function something that does some stuff; and within that function there's an instance of a class. That instance I would like to monkey patch.
from functools import update_wrapper
class Foobar:
    """The real implementation the decorator should patch away."""

    def get_something(self):
        """Return the real value."""
        return "apple"
class FakeFoobar:
    """Stand-in implementation to be swapped in during the patched call."""

    def get_something(self):
        """Return the fake value."""
        return "orange"
class my_decorator:
    """Transparent wrapper around a function (the question's starting point:
    the patching logic is still missing)."""

    def __init__(self, original_function):
        self._original_function = original_function
        update_wrapper(self, original_function)

    def __call__(self, *args, **kwargs):
        # some magic here?  (this is where Foobar would be patched)
        func = self._original_function
        return func(*args, **kwargs)
# Restored decorator (was mangled to `#my_decorator`).
@my_decorator
def something():
    """Build a Foobar and return its value; the decorator should intercept."""
    f = Foobar()
    return f.get_something()
# Demo entry point: with the draft decorator this still prints "apple",
# since no patching happens yet.
if __name__ == '__main__':
    print(something())
I'm trying either to do a 1-to-1 replacement of Foobar with FakeFoobar or to monkey patch Foobar's get_something method with FakeFoobar's get_something method.
When I run the code above, I get the following:
>>> something()
'apple'
>>>
I would like to find some way augment the Foobar's get_something method so that we get the following output:
>>> something()
'orange'
>>>
There's a mock module within the unittest library, however, it's not clear to me how I could leverage that for my use case. I'm fairly married to the idea of not passing an argument into the decorator or an extra argument into the something function as many of the examples of the mock library show.
I also notice that the moto library is accomplishing something similar to what I'm trying to do. I tried digging into the source code, but it seems fairly complex for what I'm trying to do.
How about updating the global variables dict of the function?
from functools import update_wrapper
class Foobar:
    """Real implementation; the decorator swaps this out during the call."""

    def get_something(self):
        """Return the real value."""
        return "apple"
class FakeFoobar:
    """Replacement used while the decorated function runs."""

    def get_something(self):
        """Return the fake value."""
        return "orange"
class my_decorator:
    """Temporarily rebind 'Foobar' to 'FakeFoobar' in the wrapped function's
    global namespace for the duration of each call."""

    def __init__(self, original_function):
        self._original_function = original_function
        update_wrapper(self, original_function)

    def __call__(self, *args, **kwargs):
        f = self._original_function
        # Fixed: `func_globals` exists only on Python 2 function objects;
        # `__globals__` is the portable attribute.
        restore_val = f.__globals__['Foobar']
        f.__globals__['Foobar'] = f.__globals__['FakeFoobar']
        # ^^^^^ This is your magic-line.
        try:
            return f(*args, **kwargs)
        finally:
            # Restore unconditionally, whether the call returned or raised
            # (the original's `except: raise` clause was a no-op).
            f.__globals__['Foobar'] = restore_val
# Restored decorator (was mangled to `#my_decorator`).
@my_decorator
def something():
    """Build a Foobar and return its value; the decorator patches Foobar."""
    f = Foobar()
    return f.get_something()
# Demo: the swap is active only inside the decorated call.
if __name__ == '__main__':
    print(something()) #Prints orange
    print(Foobar().get_something()) #Prints apple
I have this class:
class SomeClass(object):
    """Owner of a per-instance cache filled by the check_cache decorator."""

    def __init__(self):
        self.cache = {}

    def check_cache(method):
        """Decorator: memoize *method*'s result in self.cache by name."""
        def wrapper(self):
            if method.__name__ in self.cache:
                print('Got it from the cache!')
                return self.cache[method.__name__]
            print('Got it from the api!')
            self.cache[method.__name__] = method(self)
            return self.cache[method.__name__]
        return wrapper

    # Restored decorator (was mangled to `#check_cache`).
    @check_cache
    def expensive_operation(self):
        return get_data_from_api()
def get_data_from_api():
    """Stand-in for the real API call; always returns the same payload."""
    return 'lots of data'
The idea is that I can use the #check_cache decorator to keep the expensive_operation method from calling an api additional times if the result is already cached.
This works fine, it seems.
>>> sc.expensive_operation()
Got it from the api!
'lots of data'
>>> sc.expensive_operation()
Got it from the cache!
'lots of data'
But I would love to be able to test it with another decorator:
import unittest
class SomeClassTester(SomeClass):
    """Test double that counts calls to expensive_operation."""

    def counted(f):
        """Decorator: count invocations on the wrapper function itself."""
        def wrapped(self, *args, **kwargs):
            wrapped.calls += 1
            return f(self, *args, **kwargs)
        wrapped.calls = 0
        return wrapped

    # Restored decorator (was mangled to `#counted`).  Note the ordering
    # problem discussed below: this counts wrapper calls, not API calls.
    @counted
    def expensive_operation(self):
        return super().expensive_operation()
class TestSomeClass(unittest.TestCase):
def test_api_is_only_called_once(self):
sc = SomeClassTester()
sc.expensive_operation()
self.assertEqual(sc.expensive_operation.calls, 1) # is 1
sc.expensive_operation()
self.assertEqual(sc.expensive_operation.calls, 1) # but this goes to 2
# Run the test case defined above.
unittest.main()
The problem is that the counted decorator counts the number of times the wrapper function is called, not this inner function.
How do I count that from SomeClassTester?
There's no easy way to do this. Your current test applies the decorators in the wrong order. You want check_cache(counted(expensive_operation)), but you're getting the counted decorator on the outside instead: counted(check_cache(expensive_operation)).
There's no easy way to fix this within the counted decorator, because by the time it gets called, the original function is already wrapped up by the check_cache decorator, and there's no easy way to change the wrapper (it holds its reference to the original function in a closure cell, which is read-only from the outside).
One possible way to make it work is to rebuild the whole method with the decorators in the desired order. You can get a reference to the original method from the closure cell:
class SomeClassTester(SomeClass):
    """Rebuilds expensive_operation with decorators in the intended order:
    check_cache(counted(original))."""

    def counted(f):
        """Decorator: count invocations on the wrapper function itself."""
        def wrapped(self, *args, **kwargs):
            wrapped.calls += 1
            return f(self, *args, **kwargs)
        wrapped.calls = 0
        return wrapped

    # The undecorated original is fished out of check_cache's closure cell.
    # Fixed: a cell's value lives in `.cell_contents` -- there is no
    # `.cell_value` attribute, so the original raised AttributeError.
    expensive_operation = SomeClass.check_cache(
        counted(SomeClass.expensive_operation.__closure__[0].cell_contents)
    )
This is of course far from ideal, since you need to know exactly what decorators are being applied on the method in SomeClass in order to apply them again properly. You also need to know the internals of those decorators so that you can get the right closure cell (the [0] index may not be correct if the other decorator gets changed).
Another (perhaps better) approach might be to change SomeClass in such a way that you can inject your counting code in between the changed method and the expensive bit you want to count. For example, you could have the real expensive part be in _expensive_method_implementation, while the decorated expensive_method is just a simple wrapper that calls it. The test class can override the _implementation method with its own decorated version (which might even skip the actually expensive part and just return dummy data). It doesn't need to override the regular method or mess with its decorators.
It is impossible to do this, without modifying the base class to provide hooks or changing the whole decorated function in derived class based on internal knowledge of base class. Though there is a third way based on internal working of cache decorator, basically change your cache dict so that it counts
class CounterDict(dict):
    """dict that also records how many times each key has been assigned."""

    def __init__(self, *args):
        super().__init__(*args)
        self.count = {}

    def __setitem__(self, key, value):
        # Bump the per-key write counter, then store normally.
        self.count[key] = self.count.get(key, 0) + 1
        return super().__setitem__(key, value)
class SomeClassTester(SomeClass):
    """SomeClass whose cache records per-key write counts."""

    def __init__(self):
        # Deliberately does not call super().__init__(): the base __init__
        # only sets self.cache, which we replace with the counting dict.
        self.cache = CounterDict()
class TestSomeClass(unittest.TestCase):
def test_api_is_only_called_once(self):
sc = SomeClassTester()
sc.expensive_operation()
self.assertEqual(sc.cache.count['expensive_operation'], 1) # is 1
sc.expensive_operation()
self.assertEqual(sc.cache.count['expensive_operation'], 1) # is 1
Is there are a way to pass a variable between two python decorators applied to the same function? The goal is for one of the decorators to know that the other was also applied. I need something like decobar_present() from the example below:
def decobar(f):
    """Wrap *f* so its result gets "bar" appended."""
    def wrap():
        result = f()
        return result + "bar"
    return wrap
def decofu(f):
    # Question code (Python 2 syntax).  `decobar_present` is the
    # hypothetical helper being asked for -- it is not defined anywhere.
    def wrap():
        print decobar_present() # Tells me whether decobar was also applied
        return f() + "fu"
    return wrap
# Restored decorators (were mangled to `#decofu` / `#decobar`).
@decofu
@decobar
def important_task():
    return "abc"
More generally I would like to be able to modify the behavior of decofu depending on whether decobar was also applied.
You can add the function to a "registry" when decobar is applied to it, then later check the registry to determine whether decobar was applied to the function or not. This approach requires preserving original function's __module__ and __name__ properties intact (use functools.wraps over the wrapper function for that).
import functools
class decobar(object):
    """Decorator that records every function it is applied to in a registry,
    keyed by module-qualified name."""

    registry = set()

    # Restored decorators (were mangled to `#classmethod`).
    @classmethod
    def _func_key(cls, f):
        # Qualify by module so same-named functions in different modules
        # don't collide.  Fixed: `func_name` is Python-2-only; `__name__`
        # works on both 2 and 3.
        return '.'.join((f.__module__, f.__name__))

    @classmethod
    def present(cls, f):
        """True if decobar was applied to *f* (wraps() keeps the key stable
        through further wrapping)."""
        return cls._func_key(f) in cls.registry

    def __call__(self, f):
        self.registry.add(self._func_key(f))

        # Restored decorator (was `#functools.wraps(f)`).
        @functools.wraps(f)
        def wrap():
            return f() + "bar"
        return wrap

# Make the decorator a singleton instance so plain `@decobar` works.
decobar = decobar()
def decofu(f):
    """Wrap *f* so its result gets "fu" appended, reporting whether decobar
    was also applied to it."""
    # Restored decorator (was `#functools.wraps(f)`); print converted from
    # Python 2 statement form.
    @functools.wraps(f)
    def wrap():
        print(decobar.present(f)) # Tells me whether decobar was also applied
        return f() + "fu"
    return wrap
# Restored decorators (were mangled to `#decofu` / `#decobar`).
@decofu
@decobar
def important_task():
    return "abc"
Used a class to implement decobar, as it keeps registry and present() in a single namespace (which feels slightly cleaner, IMO)
To pass a variable between two python decorators you can use the decorated function's keyword arguments dictionary. Only don't forget to pop the added argument from there before calling the function from within the second decorator.
def decorator1(func):
    """Smuggle cat_says='meow' into the wrapped call's keyword arguments."""
    def wrap(*args, **kwargs):
        return func(*args, **dict(kwargs, cat_says='meow'))
    return wrap
def decorator2(func):
    """Pop and print the smuggled 'cat_says' argument before forwarding."""
    def wrap(*args, **kwargs):
        message = kwargs.pop('cat_says')
        print(message)
        return func(*args, **kwargs)
    return wrap
class C:
    """Demo class; the two decorators pass a value between themselves."""

    # Restored decorators (were mangled to `#decorator1` / `#decorator2`).
    @decorator1
    @decorator2
    def spam(self, a, b, c, d=0):
        print("Hello, cat! What's your favourite number?")
        return a + b + c + d
# Demo: decorator1 injects cat_says, decorator2 pops and prints it.
x=C()
print(x.spam(1, 2, 3, d=7))
While it is possible to do things like manipulate the stack trace, you're better off, I think, simply creating a function decofubar and incorporate as much of both "fu" and "bar" as possible. At a minimum, it will make your code cleaner and more obvious.
Each decorator gets to wrap another function. The function passed to decofu() is the result of the decobar() decorator.
Just test for specific traits of the decobar wrapper, provided you make the wrapper recognisable:
def decobar(f):
    """Append "bar" to *f*'s result; tag the wrapper so cooperating
    decorators can recognise it."""
    def wrap():
        return f() + "bar"
    setattr(wrap, 'decobar', True)
    return wrap
def decofu(f):
    """Append "fu" to *f*'s result; report whether *f* is a decobar wrapper."""
    def wrap():
        # Fixed: getattr needs a default -- without one, an undecorated f
        # raises AttributeError instead of reporting 'not decobar'.
        # (print converted from Python 2 statement form.)
        print('decobar!' if getattr(f, 'decobar', False) else 'not decobar')
        return f() + "fu"
    return wrap
I used an arbitrary attribute on the wrapper function, but you could try to test for a name (not so unambiguous), for the signature (using inspect.getargspec() perhaps), etc.
This is limited to direct wrapping only.
Generally speaking, you don't want to couple decorators as tightly as all this. Work out a different solution and only depend on function signature or return values.
You can assign flag to f (or rather wrap) in decobar just like this
def decobar(f):
def wrap():
return f() + "bar"
wrap.decobar_applied = True
return wrap
def decofu(f):
    """Append "fu" to *f*'s result; react to the flag set by decobar."""
    def wrap():
        if getattr(f, 'decobar_applied', False):
            # Fixed: the original called the undefined `decobar_present()`
            # here (a NameError at runtime) using Python 2 print syntax.
            print('decobar was also applied')
        return f() + "fu"
    return wrap
# Restored decorators (were mangled to `#decofu` / `#decobar`).
@decofu
@decobar
def important_task():
    return "abc"
I'm attempting to implement a decorator on certain methods in a class so that if the value has NOT been calculated yet, the method will calculate the value, otherwise it will just return the precomputed value, which is stored in an instance defaultdict. I can't seem to figure out how to access the instance defaultdict from inside of a decorator declared outside of the class. Any ideas on how to implement this?
Here are the imports (for a working example):
from collections import defaultdict
from math import sqrt
Here is my decorator:
class CalcOrPass:
    # Question's broken draft: it needs the SimpleData instance's `stats`
    # dict, but `dict_from_SimpleData` is not defined anywhere -- that is
    # exactly what the question is asking how to reach.
    def __init__(self, func):
        self.f = func

    #if the value is already in the instance dict from SimpleData,
    #don't recalculate the values, instead return the value from the dict
    def __call__(self, *args, **kwargs):
        # can't figure out how to access/pass dict_from_SimpleData to here :(
        res = dict_from_SimpleData[self.f.__name__]
        if not res:
            res = self.f(*args, **kwargs)
            # NOTE(review): `self.f__name__` is missing a dot -- presumably
            # `self.f.__name__` was meant.
            dict_from_SimpleData[self.f__name__] = res
        return res
And here's the SimpleData class with decorated methods:
class SimpleData:
    """Small stats container; decorated methods cache results in self.stats."""

    def __init__(self, data):
        self.data = data
        self.stats = defaultdict() #here's the dict I'm trying to access

    # Restored decorators (were mangled to `#CalcOrPass`).
    @CalcOrPass
    def mean(self):
        return sum(self.data)/float(len(self.data))

    @CalcOrPass
    def se(self):
        return [i - self.mean() for i in self.data]

    @CalcOrPass
    def variance(self):
        return sum(i**2 for i in self.se()) / float(len(self.data) - 1)

    @CalcOrPass
    def stdev(self):
        return sqrt(self.variance())
So far, I've tried declaring the decorator inside of SimpleData, trying to pass multiple arguments with the decorator(apparently you can't do this), and spinning around in my swivel chair while trying to toss paper airplanes into my scorpion tank. Any help would be appreciated!
The way you define your decorator the target object information is lost. Use a function wrapper instead:
def CalcOrPass(func):
    """Memoize *func*'s result in the owning instance's `stats` mapping."""
    # Restored decorator (was mangled to `#wraps(func)`).
    @wraps(func)
    def result(self, *args, **kwargs):
        # Fixed: `self.stats[func.__name__]` raises KeyError on first access
        # because SimpleData's defaultdict() has no default_factory; .get
        # returns None for a miss, which the falsiness test below handles.
        res = self.stats.get(func.__name__)
        if not res:
            res = func(self, *args, **kwargs)
            self.stats[func.__name__] = res
        return res
    return result
wraps is from functools and not strictly necessary here, but very convenient.
Side note: defaultdict takes a factory function argument:
defaultdict(lambda: None)
But since you're testing for the existence of the key anyway, you should prefer a simple dict.
You can't do what you want when your function is defined, because it is unbound. Here's a way to achieve it in a generic fashion at runtime:
class CalcOrPass(object):
    """Memoizing method decorator implemented as a descriptor."""

    def __init__(self, func):
        self.f = func

    def __get__(self, obj, type=None): # Cheat.
        # Re-wrap the *bound* method so __call__ can reach the instance.
        return self.__class__(self.f.__get__(obj, type))

    #if the value is already in the instance dict from SimpleData,
    #don't recalculate the values, instead return the value from the dict
    def __call__(self, *args, **kwargs):
        # `self.f` is bound here, so __self__ is the owning instance.
        stats = self.f.__self__.stats
        name = self.f.__name__
        # Fixed the original's "TODO handle KeyError": .get returns None on
        # a first-access miss instead of raising.
        res = stats.get(name)
        if not res:
            res = self.f(*args, **kwargs)
            # Fixed: the original wrote `self.f__name__` (missing dot).
            stats[name] = res
        return res
A short explanation:
Our decorator defines __get__ (and is hence said to be a descriptor). Whereas the default behaviour for an attribute access is to get it from the object's dictionary, if the descriptor method is defined, Python will call that instead.
The case with objects is that object.__getattribute__ transforms an access like b.x into type(b).__dict__['x'].__get__(b, type(b))
This way we can access the bound class and its type from the descriptor's parameters.
Then we create a new CalcOrPass object which now decorates (wraps) a bound method instead of the old unbound function.
Note the new style class definition. I'm not sure if this will work with old-style classes, as I haven't tried it; just don't use those. :) This will work for both functions and methods, however.
What happens to the "old" decorated functions is left as an exercise.
I'd like to provide the capability for users of one of my modules to extend its capabilities by providing an interface to call a user's function. For example, I want to give users the capability to be notified when an instance of a class is created and given the opportunity to modify the instance before it is used.
The way I've implemented it is to declare a module-level factory function that does the instantiation:
# in mymodule.py
def factory(cls, *args, **kwargs):
    """Instantiation hook: module code calls this instead of cls(...) so
    users can replace it and intercept object creation."""
    instance = cls(*args, **kwargs)
    return instance
Then when I need an instance of a class in mymodule, I do factory(cls, arg1, arg2) rather than cls(arg1, arg2).
To extend it, a programmer would write in another module a function like this:
def myFactory(cls, *args, **kwargs):
    """User callback: delegate to the previously installed factory (stored
    on this function as `chain`), then optionally tweak the instance."""
    obj = myFactory.chain(cls, *args, **kwargs)
    # do something with the instance here if desired
    return obj
Installation of the above callback looks like this:
# Install the callback: stash the old factory on the new one, then swap it in.
myFactory.chain, mymodule.factory = mymodule.factory, myFactory
This seems straightforward enough to me, but I was wondering if you, as a Python programmer, would expect a function to register a callback rather than doing it with an assignment, or if there were other methods you would expect. Does my solution seem workable, idiomatic, and clear to you?
I am looking to keep it as simple as possible; I don't think most applications will actually need to chain more than one user callback, for example (though unlimited chaining comes "for free" with the above pattern). I doubt they will need to remove callbacks or specify priorities or order. Modules like python-callbacks or PyDispatcher seem to me like overkill, especially the latter, but if there are compelling benefits to a programmer working with my module, I'm open to them.
Taking aaronsterling's idea a bit further:
class C(object):
    """Class whose instances are threaded through registered callbacks."""

    # Class-level registry of callbacks to run on every new instance.
    _oncreate = []

    def __new__(cls):
        # Thread each callback's return value into the next (what the
        # original did via `reduce`, which is not a builtin on Python 3).
        obj = super(C, cls).__new__(cls)
        for callback in cls._oncreate:
            obj = callback(obj)
        return obj

    @classmethod
    def oncreate(cls, func):
        """Register *func* to post-process every new instance."""
        cls._oncreate.append(func)
# Demo (decorator restored from `#C.oncreate`; prints converted from
# Python 2 statement form).
c = C()
print(hasattr(c, 'spew'))  # False: no callback registered yet

@C.oncreate
def spew(obj):
    obj.spew = 42
    return obj

c = C()
print(c.spew)  # 42: the callback ran inside __new__
Combining Aaron's idea of using a decorator and Ignacio's idea of a class that maintains a list of attached callbacks, plus a concept borrowed from C#, I came up with this:
class delegate(object):
    """C#-style delegate: a base function plus an ordered callback chain.

    Callbacks are attached with `+=` or the `.callback` decorator, detached
    with `-=`.  Each callback receives the running result; returning None
    leaves the result unchanged.
    """

    def __init__(self, func):
        self.callbacks = []
        self.basefunc = func

    def callback(self, func):
        """Decorator form of attachment; returns *func* unchanged so the
        decorated name stays usable."""
        if callable(func):
            self.__isub__(func)          # de-duplicate before appending
            self.callbacks.append(func)
        return func

    def __iadd__(self, func):
        # `+=` reuses the decorator's attach logic but returns the delegate.
        self.callback(func)
        return self

    def __isub__(self, func):
        try:
            self.callbacks.remove(func)
        except ValueError:
            pass                          # detaching a stranger is a no-op
        return self

    def __call__(self, *args, **kwargs):
        result = self.basefunc(*args, **kwargs)
        for func in self.callbacks:
            newresult = func(result)
            result = result if newresult is None else newresult
        return result
Decorating a function with #delegate allows other functions to be "attached" to it.
# Restored decorator (was mangled to `#delegate`).
@delegate
def intfactory(num):
    """Base factory: coerce *num* to int."""
    return int(num)
Functions can be added to the delegate with += (and removed with -=). You can also decorate with funcname.callback to add a callback function.
# Restored decorator (was mangled to `#intfactory.callback`); print
# converted from Python 2 statement form.
@intfactory.callback
def notify(num):
    print("notify:", num)
def increment(num):
    """Return *num* plus one."""
    return num + 1
# Attach more callbacks with +=; results chain: 3 -> 4 -> 8.
# (print converted from Python 2 statement form.)
intfactory += increment
intfactory += lambda num: num * 2
print(intfactory(3)) # outputs 8
Does this feel Pythonic?
I might use a decorator so that the user could just write.
# Restored decorator (was mangled to `#new_factory`).
@new_factory
def myFactory(cls, *args, **kwargs):
    """User callback installed by the decorator; chains to the old factory."""
    instance = myFactory.chain(cls, *args, **kwargs)
    # do something with the instance here if desired
    return instance
Then in your module,
import sys
def new_factory(f):
    """Register *f* as this module's factory, chaining the previous factory
    onto it as `f.chain`."""
    module = sys.modules[__name__]
    f.chain = module.factory
    module.factory = f
    return f