Storing state between decorators in python - python

I'll start by saying, I have a suspicion this is a solution that could be solved with a functional programming approach, but I don't know nearly enough of the concepts (but have been trying).
I've based my current solution on:
https://pythonconquerstheuniverse.wordpress.com/2012/04/29/python-decorators/
http://www.brianholdefehr.com/decorators-and-functional-python
https://github.com/mitsuhiko/click/blob/master/click/decorators.py
In the interest of learning (that's all this is!) how to build decorators, I decided to make a simple cache decorator.
But I get stuck in a loop where, I try to encapsulate the function I'm wrapping in a class, but every time I call the function I've wrapped, I call __call__ in wrapping class and so on ad infinitum.
I think I could have a nest of closures between the chain decorators, but I don't know how to collect all my variables in one scope.
I appreciate I could put all my arguments in a single decorator call, but my intention here is to learn how to chain decorators and store state between them.
Can anyone suggest a way (or amend my way) to store state between chained decorators?
My intended design was:
# main.py
import http.client
from cache import cache

# NOTE(review): these decorator lines were '@cache.keys(...)' / '@cache.lifetime(...)'
# in the original markup; the page scrape turned '@' into '#'.  Restored.
@cache.keys('domain', 'url')
@cache.lifetime(3600)
def make_http_request(domain, url='/'):
    """Issue a GET request against *domain*/*url* and return the HTTPResponse.

    Results are cached on disk, keyed by (domain, url), for one hour.
    """
    # Bug fix: the original called httplib (the Python 2 module name) even
    # though http.client was imported.
    conn = http.client.HTTPConnection(domain)
    conn.request("GET", url)
    return conn.getresponse()


if __name__ == '__main__':
    # Bug fix: HTTPConnection expects a bare host name, not a full URL with
    # scheme, so 'http://example.com/' would fail name resolution.
    print(make_http_request('example.com'))
with cache.py looking like
import hashlib
import inspect
import os
import pickle  # used by CacheManager.get_key/set_key for on-disk storage
__author__ = 'drews'
def expand(path):
    """Return *path* with a leading '~' expanded and made absolute."""
    home_resolved = os.path.expanduser(path)
    return os.path.abspath(home_resolved)
class CacheManager():
    """Decorator object wrapping a function: the result is stored in a file
    keyed by an md5 of selected arguments, and returned from that file on any
    later call with the same key instead of re-invoking the function.
    """

    def __init__(self, function, function_arg_name):
        self.lifetime = 3600            # seconds; NOTE(review): not yet enforced
        self.cache_keys = None          # argument names that form the cache key
        self.cache_path = '~/.decorator_cache/'
        self.f = function
        self.arg_names = function_arg_name

    def __call__(self, *args, **kwargs):
        if len(args) > 0:
            # Bug fix: the original did self.arg_names.remove('self'), mutating
            # the stored list and corrupting state for every later call.
            arg_names = [name for name in self.arg_names if name != 'self']
            key_args = dict(zip(arg_names, args))
            key_args.update(kwargs)
        else:
            key_args = kwargs
        self._initialise(cache_path=expand(self.cache_path))
        key = self._build_key(key_args)
        if self.key_exists(key):
            result = self.get_key(key)
        else:
            # Bug fix: the original called self.f() and silently dropped the
            # caller's arguments.
            result = self.f(*args, **kwargs)
            self.set_key(key, result)
        return result

    def _build_key(self, key_elements):
        """md5 the values of the configured cache_keys, in declaration order."""
        m = hashlib.md5()
        for key in self.cache_keys:
            m.update(key_elements[key].encode('utf-8'))
        return m.hexdigest()

    def _initialise(self, cache_path):
        # Create the cache directory and any missing parent directories.
        def initialise_path(path):
            if not os.path.isdir(path):
                (head, tail) = os.path.split(path)
                if not os.path.isdir(head):
                    initialise_path(head)
                os.mkdir(path)
        initialise_path(cache_path)

    def key_exists(self, key):
        path = os.path.join(expand(self.cache_path), key)
        return os.path.exists(path)

    def get_key(self, key):
        """Load a cached result.  Missing in the original even though
        __call__ depends on it."""
        path = os.path.join(expand(self.cache_path), key)
        with open(path, 'rb') as fh:
            return pickle.load(fh)

    def set_key(self, key, value):
        """Persist a result.  Missing in the original even though __call__
        depends on it."""
        path = os.path.join(expand(self.cache_path), key)
        with open(path, 'wb') as fh:
            pickle.dump(value, fh)
class CacheDefinitionDecorator(object):
    """Base class for the chainable cache decorators; records the decorator
    arguments for the subclass __call__ to apply."""

    def __init__(self, *args, **kwargs):
        self.d_args = args
class CacheKeyDefinitionDecorator(CacheDefinitionDecorator):
    """Chainable decorator: records which argument names form the cache key.

    The first decorator in the chain wraps the target in a CacheManager; any
    later decorator sees the CacheManager and only updates its state.
    """

    def __call__(self, func, *args, **kwargs):
        if not isinstance(func, CacheManager):
            # Bug fix: inspect.getargspec() was removed in Python 3.11;
            # getfullargspec()[0] yields the same positional-argument names.
            func = CacheManager(func, inspect.getfullargspec(func)[0])
        func.cache_keys = self.d_args
        return func
class CacheLifetimeDefintionDecorator(CacheDefinitionDecorator):
    """Chainable decorator: records the cache lifetime (seconds) on the
    shared CacheManager, wrapping the target on first use."""

    def __call__(self, func, *args, **kwargs):
        if not isinstance(func, CacheManager):
            # Bug fix: inspect.getargspec() was removed in Python 3.11;
            # getfullargspec()[0] yields the same positional-argument names.
            func = CacheManager(func, inspect.getfullargspec(func)[0])
        func.lifetime = self.d_args[0]
        return func
class CacheStruct(object):
    """Tiny attribute bag: CacheStruct(a=1).a == 1."""

    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
# Public entry point: cache.keys(...) and cache.lifetime(...) are the
# chainable decorator classes defined above.
cache = CacheStruct(
keys=CacheKeyDefinitionDecorator,
lifetime=CacheLifetimeDefintionDecorator
)

Related

How to decorate an instance with code-blocks and accessing/using the variables of the instance?

I try to build a decorator for methods of instances (not classes) that flexibly puts code blocks in front and/or behind the method (and not affect other instances). Up to my code below works:
def Decorate(func, before = None, after = None):
    """Wrap *func* so the optional *before* hook runs first and the optional
    *after* hook runs last; the wrapped function's result is passed through."""
    def wrapped(*args, **kwargs):
        if before:
            before()  # pre-hook
        outcome = func(*args, **kwargs)
        if after:
            after()   # post-hook
        return outcome
    return wrapped
class Test():
    """Toy class whose put() prints a prefix followed by the instance name."""

    def __init__(self, name):
        self.name = name

    def put(self, prefix):
        # Identical output to the original: "<prefix> <name>".
        print(prefix, self.name)
# Demo: patch the decorated *bound* method back onto the instance only, so
# other Test instances keep the undecorated put().
a = Test('me')
def Before():
print('before')
def After():
print('after')
# a.put is already bound, so Decorate never needs to be handed `self`.
a.put = Decorate(a.put, Before, After)
a.put('it is')
How can I extend the code blocks accessing/using variables and methods of the instance? A code example for this would look like this:
# Desired usage (illustrative only): the hook wants access to the instance,
# but `self` and `any_argument` are undefined in this scope, so this snippet
# cannot run as written -- that is exactly the question being asked.
def Before():
print('before')
print(self.name)
self.any_method(any_argument) # just an example!
I already tried several things without success. And I already struggle to access the instance values directly in the wrapper:
def Decorate(func, before = None, after = None):
# Broken attempt (kept as-is): when used as a.put = Decorate(a.put, ...),
# `func` is already a *bound* method, so wrap's `self` parameter actually
# receives the first real call argument (the string 'it is') -- hence the
# AttributeError: 'str' object has no attribute 'name' described below.
def wrap(self, *args, **kwargs):
if before: before() # code block insert
print(self.name) # --> even this DOES NOT WORK!
result = func(self, *args, **kwargs)
if after: after() # code block insert
return result
return wrap
Here print(self.name) throws an error: AttributeError: 'str' object has no attribute 'name'. So it looks like that I am far away in using the same comment in one of the code blocks (Before() & After()) below.
One addition: The approach works when I add a method to the instance:
This method is in the class (so for working with strings and exec, but that enables to deliver the name as string or the function itself):
def addMethod(self, method, givenName = ''):
    """Attach *method* to this instance as a bound method.

    The method is registered under *givenName* when supplied, otherwise under
    the function's own __name__; the chosen name is appended to self._methods.
    """
    print('add')
    if givenName == '':
        N = method.__name__
    else:
        N = givenName
    self._methods.append(N)
    # Bug/security fix: the original built a string and ran it through exec(),
    # which risks code injection and also fails whenever `method` is not
    # resolvable by name in this scope.  Binding through the descriptor
    # protocol (__get__) does the same job safely and directly.
    setattr(self, N, method.__get__(self))
The code in the main part looks like this:
# Module-level function attached to instance `a` via addMethod above; after
# the call, a.x() behaves like a bound method.
def x(self):
print('hello,', self.name)
a.addMethod(x)
a.x()
Any solution is appreciated and many thanks in advance!
from functools import wraps


def Decorate(func, before = None, after = None):
    """Wrap *func* with optional before/after hooks.

    Fix: the '@wraps(func)' decorator line had been mangled to '#wraps(func)'
    by the page markup; restored so the wrapper keeps func's metadata.
    """
    @wraps(func)
    def wrap(*args, **kwargs):
        if before: before() # code block insert
        result = func(*args, **kwargs)
        if after: after() # code block insert
        return result
    return wrap
def Before():
    """Pre-hook used by the demo below."""
    print('before')


def After():
    """Post-hook used by the demo below."""
    print('after')


class Test():
    """Demo class; put() is decorated at class-definition time, so `self`
    simply travels through the wrapper's *args untouched."""

    def __init__(self, name):
        self.name = name

    def put(self, prefix):
        print(prefix, self.name)

    put = Decorate(put, Before, After)


a = Test('me')
a.put("pre")
You could execute your decorator inside your class. In your wrap, pass whatever you get to func by (*args, **kwargs). self is still the first argument implicitly in args.
Edit: Code related concerns from comments
from functools import wraps


def Before(t):
    """Pre-hook; receives the instance so it can read its attributes."""
    print('before')
    print(t.name)


def After(t):
    """Post-hook; receives the instance so it can read its attributes."""
    print('after')
    print(t.name)


def Decorate(func, before = None, after = None):
    """Wrap *func*; the hooks are handed args[0] -- the instance -- so they
    can use its state.

    Fix: '@wraps(func)' had been mangled to '#wraps(func)' by the page markup.
    """
    @wraps(func)
    def wrap(*args, **kwargs):
        if before: before(args[0]) # code block insert
        result = func(*args, **kwargs)
        if after: after(args[0]) # code block insert
        return result
    return wrap


class Test():
    def __init__(self, name):
        self.name = name

    def put(self, prefix):
        print(prefix, self.name)

    put = Decorate(put, Before, After)


a = Test('me')
a.put("pre")

Set methods with decorators dynamically

I have this class:
# NOTE(review): the '#list_route'/'#detail_route' lines below were
# '@list_route'/'@detail_route' decorators in the original markup; the page
# scrape turned '@' into '#'.  list_route/detail_route, SomePermission and
# get_translated_object(s) come from the asker's project (DRF), not shown here.
class SomeMixin:
permissions = (SomePermission,)
methods = ('GET',)
#list_route(methods=methods, url_path='en', permission_classes=permissions)
def en_list(self):
return get_translated_objects(self, 'en')
#list_route(methods=methods, url_path='ru', permission_classes=permissions)
def ru_list(self):
return get_translated_objects(self, 'ru')
#detail_route(methods=methods, url_path='en', permission_classes=permissions)
def en_detail(self):
return get_translated_object(self.get_object(), 'en')
#detail_route(methods=methods, url_path='ru', permission_classes=permissions)
def ru_detail(self):
return get_translated_object(self.get_object(), 'ru')
I can have more languages in the future and it's not a good solution.
I thought to create loop of languages list and add methods to the class with setattr(self, func_name, func), like:
# Sketch from the question: generate one method per language.  `func` is
# undefined here -- this fragment only illustrates the intent.
langs = ('en', 'ru')
for lang in langs:
setattr(self, func.__name__, func)
But I should add decorators to every method, how can I do it?
I believe you should be able to patch the contents of this example into your code. That being said, it seems like a better approach to detect the language from the http header and return an appropriate response.
This approach though functional is not the cleanest.
# This decorator is meant to simulate the decorator exposed by django
def route(url, language, method):
    """Decorator factory: log the routing parameters, then delegate to the
    wrapped function.

    Fix: the inner wrapper now *returns* the wrapped function's result; the
    original called it and silently discarded the return value.
    """
    def wrapper(func):
        def inner(*args, **kwargs):
            print('url => {0}'.format(url))
            print('language => {0}'.format(language))
            print('method => {0}'.format(method))
            return func(*args, **kwargs)
        return inner
    return wrapper
# This class is analogous to your SomeMixin class
class foo(object):
    """Generates one routed list method per (language, url, name) triple at
    construction time."""

    def __init__(self):
        method = 'GET'
        # fields holds the parameters that will change for each method like url
        # and language
        fields = (('en', '/en', 'en_list'), ('ru', '/ru', 'ru_list'))
        for lang, url, func_name in fields:
            handler = route(url=url, language=lang, method=method)(self.func_creator(lang))
            setattr(self, func_name, handler)

    def func_creator(self, language):
        # Factory so each generated method closes over its own `language`.
        def inner():
            print('hello in {0}'.format(language))
        return inner
# Smoke-test: exercise both generated methods.
def main():
foo_instance = foo()
print('Calling foo.en_list()')
foo_instance.en_list()
print()
print('Calling foo.ru_list()')
foo_instance.ru_list()
if __name__ == '__main__':
main()

How can I cache function results and update cache value everytime invoke the function?

I am here looking for some help:
I have a function, written in Python 2.7, that takes a long time to return results, so I would like to use a cache to store the result. Every time the function is invoked, the value in the cache should be returned, and a fresh result from the function should update the cache value asynchronously. Is this possible?
for short:
cache function result.
everytime invoke the function, return cache value if function cache_key in cache, otherwise, return a default value. In the meantime, get the real-time function return value, and update the cache.
I've tried:
1. cachetools
import time
# NOTE(review): cachetools is a third-party package; '#cached(cache)' below
# was the '@cached(cache)' decorator in the original markup.
from cachetools import cached, TTLCache
# Single-entry cache whose value expires after 360 seconds.
cache = TTLCache(maxsize=1, ttl=360)
#cached(cache)
def expensive_io():
time.sleep(300)
return 1.0
But the first time the expensive_io function is invoked, I still have to wait 300s, and the cache value can't be updated until the TTL expires. After the TTL expires, I have to spend another 300s waiting for the result.
So, I wonder maybe I can use threading?:
2. threading
from threading import Thread
import Queue
# NOTE(review): Python 2 code, and the asker says it does not work.  Left
# byte-for-byte as posted; observed problems:
#   * OrderedDict and hashlib are used but never imported.
#   * getfuncthread references bare `func`; presumably self.func was meant.
#   * start() calls Queue(), but with `import Queue` that should be
#     Queue.Queue().
#   * str(*args) / str(**kwargs) raise TypeError for more than one argument.
#   * self.currsize is computed once in __init__ and never kept up to date.
class asynchronous(object):
def __init__(self, func, maxsize=128, cache=OrderedDict()):
self.func = func
self.maxsize = maxsize
self.cache = cache
self.currsize = len(cache)
# Worker closure: compute the real value, store it in cache and queue.
def getfuncthread(*args, **kwargs):
key = self.cache_key("{0}-{1}-{2}".format(self.func.__name__, str(*args), str(**kwargs)))
if self.currsize >= self.maxsize:
self.cache.popitem(False)
if not self.cache:
self.cache[key] = func(*args, **kwargs)
self.queue.put(self.cache[key])
# Reader closure: return the cached value, or a default immediately.
def returnthread(*args, **kwargs):
key = self.cache_key("{0}-{1}-{2}".format(self.func.__name__, str(*args), str(**kwargs)))
if key in self.cache:
return self.cache[key]
else:
return 2222
self.returnthread = returnthread
self.getfuncthread = getfuncthread
def cache_key(self, s):
return hashlib.sha224(s).hexdigest()
def __call__(self, *args, **kwargs):
return self.func(*args, **kwargs)
def start(self, *args, **kwargs):
self.queue = Queue()
thread1 = Thread(target=self.getfuncthread, args=args, kwargs=kwargs)
thread2 = Thread(target=self.returnthread, args=args, kwargs=kwargs)
thread1.start()
thread2.start()
return asynchronous.Result(self.queue, thread2)
class NotYetDoneException(Exception):
def __init__(self, message):
self.message = message
# Handle returned by start(); polls thread2 and reads the queue.
class Result(object):
def __init__(self, queue, thread):
self.queue = queue
self.thread = thread
def is_done(self):
return not self.thread.is_alive()
def get_result(self):
if not self.is_done():
raise asynchronous.NotYetDoneException('the call has not yet completed its task')
if not hasattr(self, 'result'):
self.result = self.queue.get()
return self.result
# NOTE(review): '#asynchronous' below was the '@asynchronous' decorator in the
# original markup.
#asynchronous
def expensive_io(n):
time.sleep(300)
return n*n
# NOTE(review): Python 2 print statements below -- this block is 2.x only.
if __name__ == '__main__':
# sample usage
import time
result1 = expensive_io.start(2)
result2 = expensive_io.start(2)
result3 = expensive_io.start(4)
try:
print "result1 {0}".format(result1.get_result())
print "result2 {0}".format(result2.get_result())
print "result3 {0}".format(result3.get_result())
except asynchronous.NotYetDoneException as ex:
print ex.message
I was thinking, two threads in the asynchronous decorator:
returnThread is used to return value from cache if cache_key in cache, if not, return a default value immediately.
getfuncthread is used to get function value by invoking func, and put it in cache and queue.
That seems logical, but still not working.
3. asyncio
Can I use asyncio ? But python2.7 not support asyncio, I found trollius package. But still dont know how to deal with it.
Any ideas would be appreciated.
Since you are forced to use Python 2.7, you won't have access to many options from the PSF's modern library implementations, including those with asyncio (as you said) and what you want (functools). Here's a solution that uses a function created by a close friend of mine (https://github.com/iwalton3), and some boilerplate code from the CPython github source:
# NOTE(review): verbatim copy of functools.wraps from the CPython source.  It
# depends on WRAPPER_ASSIGNMENTS, WRAPPER_UPDATES and update_wrapper, which
# must also be copied/defined for this to run without the functools module.
def wraps(wrapped,
assigned = WRAPPER_ASSIGNMENTS,
updated = WRAPPER_UPDATES):
"""Decorator factory to apply update_wrapper() to a wrapper function
Returns a decorator that invokes update_wrapper() with the decorated
function as the wrapper argument and the arguments to wraps() as the
remaining arguments. Default arguments are as for update_wrapper().
This is a convenience function to simplify applying partial() to
update_wrapper().
"""
return partial(update_wrapper, wrapped=wrapped,
assigned=assigned, updated=updated)
# NOTE(review): verbatim copy of functools.partial from a modern CPython.
# It uses '/' positional-only parameters (3.8+) and an f-string (3.6+), so --
# contrary to the surrounding answer -- it cannot run on the asker's
# Python 2.7.  Left byte-for-byte as posted.
class partial:
"""New function with partial application of the given arguments
and keywords.
"""
__slots__ = "func", "args", "keywords", "__dict__", "__weakref__"
def __new__(cls, func, /, *args, **keywords):
if not callable(func):
raise TypeError("the first argument must be callable")
if hasattr(func, "func"):
args = func.args + args
keywords = {**func.keywords, **keywords}
func = func.func
self = super(partial, cls).__new__(cls)
self.func = func
self.args = args
self.keywords = keywords
return self
def __call__(self, /, *args, **keywords):
keywords = {**self.keywords, **keywords}
return self.func(*self.args, *args, **keywords)
def __reduce__(self):
return type(self), (self.func,), (self.func, self.args,
self.keywords or None, self.__dict__ or None)
def __setstate__(self, state):
if not isinstance(state, tuple):
raise TypeError("argument to __setstate__ must be a tuple")
if len(state) != 4:
raise TypeError(f"expected 4 items in state, got {len(state)}")
func, args, kwds, namespace = state
if (not callable(func) or not isinstance(args, tuple) or
(kwds is not None and not isinstance(kwds, dict)) or
(namespace is not None and not isinstance(namespace, dict))):
raise TypeError("invalid partial state")
args = tuple(args) # just in case it's a subclass
if kwds is None:
kwds = {}
elif type(kwds) is not dict: # XXX does it need to be *exactly* dict?
kwds = dict(kwds)
if namespace is None:
namespace = {}
self.__dict__ = namespace
self.func = func
self.args = args
self.keywords = kwds
def make_dynamic(function):
    """Memoize *function* with per-call cache controls.

    Keyword-only flags (consumed by the wrapper, not passed through):
      clear_cache  -- empty the cache before the lookup
      ignore_cache -- call the function even on a cache hit
      skip_cache   -- do not store this call's result

    Fix: '@wraps(function)' had been mangled to '#wraps(function)' by the page
    markup.  NOTE(review): `nonlocal` and keyword-only arguments are Python 3
    syntax, so this helper cannot actually run on the asker's Python 2.7.
    """
    cache = {}
    @wraps(function)
    def result(*args, clear_cache=False, ignore_cache=False, skip_cache=False, **kwargs):
        nonlocal cache
        call = (args, tuple(kwargs.items()))
        if clear_cache:
            cache = {}
        if call in cache and not ignore_cache:
            return cache[call]
        res = function(*args, **kwargs)
        if not skip_cache:
            cache[call] = res
        return res
    return result
Altogether, this means you should be able to decorate your expensive IO calculations with a @make_dynamic decorator above your `def functionName(args, ...):` line. However, take care to ensure that all args are hashable. For any readers who are not familiar with hashability, it just means that you can call hash(object_name) in the interpreter and it returns a unique integer. These types include strings, numbers, tuples, etc.

Add methods to a class generated from other methods

I have classes like this:
class Tool(object):
def do_async(*args):
pass
for which I want to automatically generate non-async methods that make use of the async methods:
class Tool(object):
def do_async(*args):
pass
def do(*args):
result = self.do_async(*args)
return magical_parser(result)
This gets to be particularly tricky because each method needs to be accessible as both an object and class method, which is normally achieved with this magical decorator:
class class_or_instance(object):
    """Descriptor that makes a method callable on both the class and its
    instances: the wrapped function receives the instance when accessed
    through one, otherwise the class itself."""

    def __init__(self, fn):
        self.fn = fn

    def __get__(self, obj, cls):
        owner = cls if obj is None else obj
        f = lambda *args, **kwds: self.fn(owner, *args, **kwds)
        functools.update_wrapper(f, self.fn)
        return f
How can I make these methods, and make sure they're accessible as both class and object methods? This seems like something that could be done with decorators, but I am not sure how.
(Note that I don't know any of the method names in advance, but I know that all of the methods that need new buddies have _async at the end of their names.)
I think I've gotten fairly close, but this approach does not appropriately set the functions as class/object methods:
# NOTE(review): the asker's "fairly close" attempt, kept byte-for-byte.  The
# answer below points out the problems: `k` is late-bound in the closure (all
# generated methods would see the loop's final k), cls.__dict__[k] bypasses
# the descriptor protocol, and the Python 2 style MethodType(method, None,
# cls) neutralizes the class_or_instance descriptor.  '#class_or_instance'
# was '@class_or_instance' in the original markup.
def process_asyncs(cls):
methods = cls.__dict__.keys()
for k in methods:
methodname = k.replace("_async","")
if 'async' in k and methodname not in methods:
#class_or_instance
def method(self, verbose=False, *args, **kwargs):
response = self.__dict__[k](*args,**kwargs)
result = self._parse_result(response, verbose=verbose)
return result
method.__docstr__ = ("Returns a table object.\n" +
cls.__dict__[k].__docstr__)
setattr(cls,methodname,MethodType(method, None, cls))
Do not get the other method from the __dict__; use getattr() instead so the descriptor protocol can kick in.
And don't wrap the method function in a MethodType() object as that'd neutralize the descriptor you put on method.
You need to bind k to the function you generate; a closured k would change with the loop:
# Fragment from the answer: bind the loop variable k via a keyword default
# (_async_method_name=k) so each generated method keeps its own name; `k`,
# `cls` and `methodname` come from the enclosing loop shown above.
# '#class_or_instance' was '@class_or_instance' in the original markup.
#class_or_instance
def method(self, verbose=False, _async_method_name=k, *args, **kwargs):
response = getattr(self, _async_method_name)(*args,**kwargs)
result = self._parse_result(response, verbose=verbose)
return result
cls.__dict__[methodname] = method
Don't forget to return cls at the end; I've changed this to use a separate function to create a new scope to provide a new local name _async_method_name instead of a keyword parameter; this avoids difficulties with *args and explicit keyword arguments:
def process_asyncs(cls):
    """Class decorator: for every *_async method, add a synchronous companion
    that calls it and parses the response via self._parse_result.

    Fixes: restored the '@class_or_instance' decorator that the page markup
    had turned into a '#' comment, and snapshot cls.__dict__.keys() into a
    list -- under Python 3 it is a live view, and the setattr() inside the
    loop would raise "dictionary changed size during iteration".
    """
    def create_method(async_method):
        # Separate scope so each generated method binds its own async_method
        # (avoids the late-binding-closure bug of the first attempt).
        @class_or_instance
        def newmethod(self, *args, **kwargs):
            if 'verbose' in kwargs:
                verbose = kwargs.pop('verbose')
            else:
                verbose = False
            response = async_method(*args,**kwargs)
            result = self._parse_result(response, verbose=verbose)
            return result
        return newmethod

    methods = list(cls.__dict__.keys())
    for k in methods:
        methodname = k.replace("_async","")
        if 'async' in k and methodname not in methods:
            async_method = getattr(cls, k)
            setattr(cls, methodname, create_method(async_method))
    return cls

Class Decorator decorating method in python [duplicate]

This question already has answers here:
How can I decorate an instance method with a decorator class?
(2 answers)
Closed 4 years ago.
I'm trying to memoize using a decorator with the decorator being a class not a function, but I'm getting the error
TypeError: seqLength() takes exactly 2 arguments (1 given)
I'm guessing this has something to do with the classes, but not sure what's wrong from there.
The code:
import sys
class memoize(object):
'''memoize decorator'''
# NOTE(review): this is the question's broken code, kept byte-for-byte.
# self.func(self, *args) passes the *memoize* instance -- not the collatz
# instance -- as the wrapped method's `self`, which is exactly the
# "seqLength() takes exactly 2 arguments (1 given)" TypeError being asked
# about; the answers below explain the descriptor-protocol fix.
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
value = self.func(self, *args)
self.cache[args] = value
return value
# NOTE(review): Python 2 code (xrange, print statement).  '#memoize' below
# was the '@memoize' decorator in the original markup.
class collatz(object):
def __init__(self, n):
self.max = 1
self.n = n
#memoize
def seqLength(self, n):
if n>1:
if n%2 == 0:
return 1+self.seqLength(n/2)
else:
return 1+self.seqLength(3*n+1)
else:
return 1
# Returns the n (< self.n) whose Collatz sequence is longest.
# NOTE(review): self.max mixes roles -- it is compared against a *length*
# but assigned the *argument* n; presumably intentional for Euler 14, but
# the attribute name is misleading.
def maxLength(self):
for n in xrange(1, self.n):
l = self.seqLength(n)
if l > self.max:
self.max = n
return self.max
n = int(sys.argv[1])
c = collatz(n)
print c.maxLength()
This is confusing, syntactically. It's not clear if self.func is part of your memoize or a separate function that's part of some other object of some other class. (You mean the latter, BTW)
value = self.func(self, *args)
Do this to make it clear that the_func is just a function, not a member of the memoize class.
the_func= self.func
value= the_func( *args )
That kind of thing prevents confusion over the class to which self. is bound.
Also, please spell it Memoize. With a leading capital letter. It is a class definition, after all.
Using a class as a decorator is tricky, because you have to implement the descriptor protocol correctly (the currently accepted answer doesn't.) A much, much easier solution is to use a wrapper function, because they automatically implement the descriptor protocol correctly. The wrapper equivalent of your class would be:
import functools


def memoize(func):
    """Memoizing decorator backed by a closure-held cache keyed on the
    positional-argument tuple.

    Fix: '@functools.wraps(func)' had been mangled to '#functools.wraps(func)'
    by the page markup; restored so the wrapper keeps func's metadata.
    """
    cache = {}
    @functools.wraps(func)
    def wrapper(*args):
        try:
            return cache[args]
        except KeyError:
            value = func(*args)
            cache[args] = value
            return value
    return wrapper
When have so much state you want to encapsulate it in a class anyway, you can still use a wrapper function, for example like so:
import functools


class _Memoize(object):
    '''memoize decorator helper class'''
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        try:
            return self.cache[args]
        except KeyError:
            value = self.func(*args)
            self.cache[args] = value
            return value


def memoize(func):
    """Memoize *func*, delegating to a _Memoize instance through a plain
    wrapper function so the descriptor protocol (method binding) still works.

    Fix: '@functools.wraps(func)' had been mangled to '#functools.wraps(func)'
    by the page markup; restored so the wrapper keeps func's metadata.
    """
    o = _Memoize(func)
    @functools.wraps(func)
    def wrapper(*args):
        return o(*args)
    return wrapper
A decorator is just syntactic sugar for foo = decorator(foo), so in this case you're ending up making the self of seqLength be memoize instead of collatz. You need to use descriptors. This code works for me:
class memoize(object):
    '''Memoizing descriptor: accessing the decorated method through an
    instance yields a callable bound to that instance.

    Fix: the original built a brand-new memoize_inst -- with an empty cache --
    on *every* attribute access, so nothing was ever memoized across calls
    (each self.method(...) started from scratch).  The bound memoizer is now
    stored in the instance __dict__ on first access and reused from then on.
    (The flattened source also referenced self.memoize_inst, which only works
    when memoize_inst is reachable as an attribute; the module-level class is
    used directly here.)
    '''

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, type=None):
        if obj is None:
            # Class-level access: an unbound memoizer (no instance to cache on).
            return memoize_inst(obj, self.func)
        bound = memoize_inst(obj, self.func)
        # The instance attribute shadows this (non-data) descriptor, so every
        # later lookup returns the same memoizer and its cache.
        obj.__dict__[self.func.__name__] = bound
        return bound


class memoize_inst(object):
    def __init__(self, inst, fget):
        self.inst = inst
        self.fget = fget
        self.cache = {}

    def __call__(self, *args):
        # if cache hit, done
        if args in self.cache:
            return self.cache[args]
        # otherwise populate cache and return
        self.cache[args] = self.fget(self.inst, *args)
        return self.cache[args]
More on descriptors:
http://docs.python.org/howto/descriptor.html#descriptor-example

Categories