I have a function that returns a future. I want to create a decorator to the function which waits for the future to complete and then return the result essentially converting the async function to blocking function (which I will use in my REST API). Is there a way to do that?
def sync(fn):
    """Turn a future-returning callable into a blocking one.

    The wrapper drives the Tornado IOLoop with ``run_sync`` until the
    future produced by ``fn`` resolves, then hands back its result —
    suitable for calling async code from a synchronous REST handler.
    """
    def wrapped(*args, **kwargs):
        make_call = lambda: fn(*args, **kwargs)
        return IOLoop.instance().run_sync(make_call)
    return wrapped
# Fix: the decorator had been mangled to a comment (`#gen.coroutine`);
# it must actually be applied for my_coro to return a future.
@gen.coroutine
def my_coro():
    # ... coroutine body goes here ...
    pass

sync_fn = sync(my_coro)
result = sync_fn()
To resolve a future you need to yield it. Something like this might work:
from tornado import gen
def blocking(func):
    """Resolve the future returned by *func* by yielding it.

    NOTE(review): despite its name this produces a tornado coroutine,
    not a truly blocking function — callers still receive a future.
    """
    def new_func(*args, **kwargs):
        outcome = yield func(*args, **kwargs)
        return outcome
    return gen.coroutine(new_func)
Related
I am using the following approach to pass in an optional argument to a decorator:
def wait(func=None, delay=1.0):
    """Sleep *delay* seconds before each call of the decorated function.

    Works both bare (``@wait``) and with arguments (``@wait(delay=0.2)``):
    when ``func`` is given we decorate it immediately, otherwise we hand
    back the decorator for later application.
    """
    def decorator_wait(func):
        def wrapper_wait(*args, **kwargs):
            time.sleep(delay)
            return func(*args, **kwargs)
        return wrapper_wait

    if func is None:
        # Called as @wait(delay=...): return the decorator itself.
        return decorator_wait
    # Called as bare @wait: decorate right away.
    return decorator_wait(func)
# Fix: decorators were mangled to comments (`#wait`); restore the `@`.
@wait
def print_something(something):
    print(something)

@wait(delay=0.2)
def print_something_else(something):
    print(something)
The above code looks pretty difficult to follow though with all the nesting. Is there another approach to do the above, or is this the only method available for something like this?
You can avoid having to remember "do I need to call this or not?" by removing the func argument from the wait function, and remembering to always call your decorator-returner.
It would look like this:
def wait(delay=1.0):
    """Build a decorator that pauses *delay* seconds before invoking its target.

    Always returns the decorator, so it must be applied with parentheses:
    ``@wait()`` or ``@wait(delay=0.2)``.
    """
    def _decorate(target):
        def _delayed(*args, **kwargs):
            time.sleep(delay)
            return target(*args, **kwargs)
        return _delayed
    return _decorate
# Fix: decorators were mangled to comments (`#wait()`); restore the `@`.
@wait()
def print_something(something):
    print(something)

@wait(delay=0.2)
def print_something_else(something):
    print(something)

print_something("hello")
# 1 second delay, then 'hello'
print_something_else("there")
# 0.2 second delay, then 'there'
You just have to remember that wait will always return the decorator, so you have to use () when decorating your functions.
I think it is a little bit better:
import functools
import time
def wait(func=None, delay=1.0):
    """Sleep *delay* seconds before each call; usable bare or with args.

    When called without a function (``@wait(delay=3)``) we return a lambda
    that re-enters ``wait`` once the function arrives.
    """
    if func is None:
        return lambda func: wait(func=func, delay=delay)

    # Fix: the decorator line had been mangled into a comment; wraps must
    # actually be applied to preserve func's metadata.
    # See https://stackoverflow.com/questions/308999/what-does-functools-wraps-do
    @functools.wraps(func)
    def _wrapper(*args, **kwargs):
        time.sleep(delay)
        return func(*args, **kwargs)
    return _wrapper
# Fix: decorators were mangled to comments (`#wait`); restore the `@`.
@wait
def test():
    return

@wait(delay=3)
def test2():
    return
You can write classes having a __call__ method, instead of writing a bunch of nested defs.
It sounds like you want a decorator `Wait` which halts
program execution for a few seconds.
If you don't pass in a wait-time,
then the default value is 1 second.
Use-cases are shown below.
##################################################
# Fix: decorators were mangled to comments (`#Wait`); restore the `@`.
@Wait
def print_something(something):
    print(something)

##################################################
@Wait(3)
def print_something_else(something_else):
    print(something_else)

##################################################
@Wait(delay=3)
def print_something_else(something_else):
    print(something_else)
When Wait has an argument, such as @Wait(3), then the call Wait(3)
is executed before anything else happens.
That is, the following two pieces of code are equivalent
# Fix: decorators were mangled to comments; restore the `@`.
@Wait(3)
def print_something_else(something_else):
    print(something_else)

###############################################
return_value = Wait(3)

@return_value
def print_something_else(something_else):
    print(something_else)
This is a problem.
if `Wait` has no arguments:
`Wait` is the decorator.
else: # `Wait` receives arguments
`Wait` is not the decorator itself.
Instead, `Wait` ***returns*** the decorator
One solution is shown below:
Let us begin by creating the following class, DelayedDecorator:
import io
class DelayedDecorator:
    """Capture a decorator class plus its arguments; apply it to a function later.

    ``DelayedDecorator(Wait, delay=4)`` stores ``Wait`` and ``delay=4``;
    calling the instance with a function returns ``Wait(func, delay=4)``.
    """

    def __init__(self, cls, *args, **kwargs):
        print("Delayed Decorator __init__", cls, args, kwargs)
        self._cls = cls
        self._args = args
        self._kwargs = kwargs

    def __call__(self, func):
        print("Delayed Decorator __call__", func)
        if not callable(func):
            # Fix: removed a redundant function-level `import io` (the module
            # already imports io) and the StringIO detour. The message is
            # byte-identical to the original print(..., sep="\n") output:
            # three lines joined by newlines plus a trailing newline.
            msg = (
                "If only one input, input must be callable\n"
                "Instead, received:\n"
                + repr(func) + "\n"
            )
            raise TypeError(msg)
        return self._cls(func, *self._args, **self._kwargs)
Now we can write things like:
dec = DelayedDecorator(Wait, delay=4)

# Fix: the decorator was mangled to a comment (`#dec`); restore the `@`.
@dec
def delayed_print(something):
    print(something)
Note that:
dec does not not accept multiple arguments.
dec only accepts the function to be wrapped.
import inspect
class PolyArgDecoratorMeta(type):
    """Metaclass letting a decorator class be used bare or with arguments.

    ``@Wait`` (a single callable argument) instantiates the class normally,
    while ``@Wait(3)`` / ``@Wait(delay=3)`` returns a DelayedDecorator that
    applies the class once the target function arrives.
    """

    def __call__(cls, *args, **kwargs):
        if len(args) == 1 and callable(args[0]):
            # Bare decoration: instantiate normally via type.__call__.
            # Fix: super() replaces the fragile inspect.getmro(...)[1] hack,
            # and the no-op try/finally (with possibly-unbound `r`) is gone.
            # Keyword arguments are dropped on this path, exactly as before.
            return super().__call__(args[0])
        # Arguments first: defer decoration until the function is supplied.
        return DelayedDecorator(cls, *args, **kwargs)
import time
class Wait(metaclass=PolyArgDecoratorMeta):
    """Decorator that sleeps ``delay`` seconds (default 2) before each call.

    The metaclass routes both ``@Wait`` and ``@Wait(delay=...)`` forms here.
    """

    def __init__(self, func, delay=2):
        self._func = func
        self._delay = delay

    def __call__(self, *args, **kwargs):
        time.sleep(self._delay)
        return self._func(*args, **kwargs)
The following two pieces of code are equivalent:
# Fix: the decorator was mangled to a comment (`#Wait`); restore the `@`.
@Wait
def print_something(something):
    print(something)

##################################################
def print_something(something):
    print(something)

print_something = Wait(print_something)
We can print "something" to the console very slowly, as follows:
print_something("something")

#################################################
# Fix: the decorator was mangled to a comment; restore the `@`.
@Wait(delay=1)
def print_something_else(something_else):
    print(something_else)

##################################################
def print_something_else(something_else):
    print(something_else)

dd = DelayedDecorator(Wait, delay=1)
print_something_else = dd(print_something_else)

##################################################
print_something_else("something")
Final Notes
It may look like a lot of code, but you don't have to write the classes DelayedDecorator and PolyArgDecoratorMeta every time. The only code you have to write yourself is something like the following, which is fairly short:
from PolyArgDecoratorMeta import PolyArgDecoratorMeta
import time
class Wait(metaclass=PolyArgDecoratorMeta):
    """Sleep ``delay`` seconds (default 2), then delegate to the wrapped function."""

    def __init__(self, func, delay=2):
        self._func = func
        self._delay = delay

    def __call__(self, *args, **kwargs):
        time.sleep(self._delay)
        result = self._func(*args, **kwargs)
        return result
I have this code, whose purpose is to dedupe requests.
def dedup_requests(f):
    """Deduplicate concurrent identical calls: one greenlet runs *f* per key.

    All callers with the same argument key block on the same spawned
    greenlet's ``get()`` and share its result.
    """
    pending = {}

    # Fix: the wraps decorator had been mangled into a comment; apply it.
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        key = _make_call_key(args, kwargs)
        if key not in pending:
            pending[key] = gevent.spawn(f, *args, **kwargs)
        result = pending[key].get()
        # Another waiter may already have removed the key.
        if key in pending:
            del pending[key]
        return result

    return wrapped
I suspect it is causing a deadlock somehow (this happens once in a while, and I can't reproduce it).
It happens both when using threading and gevent.
Is the recurring use of get allowed?
Can this code even produce a deadlock when threading is not involved?
Note that it runs under other gevent tasks, so spawned tasks might spawn additional tasks, in case that's an issue.
Though I still don't exactly understand the source of the deadlock (my best guess is that get doesn't really work as expected when called more than once), this seems to work:
from gevent import lock
def queue_identical_calls(f, max_size=100):
    """Serialize identical calls to *f* with one BoundedSemaphore per key.

    ``pending`` is cleared once it grows past *max_size* to bound memory.
    """
    pending = {}

    # Fix: the wraps decorator had been mangled into a comment; apply it.
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        key = _make_call_key(args, kwargs)
        if key not in pending:
            pending[key] = lock.BoundedSemaphore(1)
        lock_for_current_call = pending[key]
        lock_for_current_call.acquire()
        try:
            result = f(*args, **kwargs)
        finally:
            # Fix: release even when f raises — otherwise every later
            # identical call deadlocks on the never-released semaphore.
            lock_for_current_call.release()
        if len(pending) > max_size:
            pending.clear()
        return result

    return wrapped
Your problem is that your code is not async. You need to have the function itself handle the key update and then in a while loop test for your values. This is an example of async working. You can prove it by noticing that the last element sometimes shows up first in the list.
import gevent
import random
pending = {}

def dedup_requests(key, *args, **kwargs):
    """Spawn ftest(key, ...) at most once per key."""
    global pending
    if key not in pending:
        gevent.spawn(ftest, key, *args, **kwargs)

def ftest(key, *args, **kwargs):
    """Sleep a random 1-7 seconds, then record the duration under *key*."""
    global pending
    duration = random.randint(1, 7)
    gevent.sleep(duration)
    pending[key] = duration
    return duration

l = ['test', 'test2', 'test3']
for name in l:
    dedup_requests(name)

# Poll until every key has reported; printing each second shows the
# out-of-order (async) completion.
while True:
    print(pending)
    if set(pending.keys()) == set(l):
        break
    gevent.sleep(1)
I am trying to monitor the latency of an application using Prometheus. There is a decorator function which calculates the time taken for a function to execute. Now, as I wrap it with a CherryPy exposed endpoint, there is no response from it.
I have also tried to use @cherrypy.tools.register('before_handler') over my decorator and then later attaching it as @cherrypy.tools.monitor_request(), but it will throw an argument exception, as the decorator accepts a function.
def monitor_request(func):
    """Record call latency and count in Prometheus metrics for *func*.

    Fix: the original discarded func's return value, so a CherryPy
    endpoint wrapped with it produced no response body.
    """
    def inner1(*args, **kwargs):
        begin = time.time()
        result = func(*args, **kwargs)
        diff = time.time() - begin
        REQUEST_LATENCY.labels(func.__name__).observe(diff)
        REQUEST_COUNT.labels(func.__name__).inc()
        return result
    return inner1
# Fix: decorators were mangled to comments; restore the `@`.
@cherrypy.expose
@monitor_request
def health1(self):
    """Give back health status"""
    return "is_healthy"
I was not returning the result of the response from the CherryPy endpoint, which was the issue. The correct code should be:
# Corrected wrapper (a fragment of monitor_request; the enclosing def is
# not shown). The key fix versus the question's version is capturing the
# wrapped function's return value in `x` and returning it, so the
# CherryPy endpoint actually produces a response body.
def inner1(*args, **kwargs):
begin = time.time()
x = func(*args, **kwargs)  # keep the response instead of discarding it
end = time.time()
diff = end-begin
REQUEST_LATENCY.labels(func.__name__).observe(diff)
REQUEST_COUNT.labels(func.__name__).inc()
return x
return inner1
Here is my way,but I feel it is not very simple, any better way?
import asyncio
import time
def timer_all(f):
    """Wrap *f* — sync or async — to print its wall-clock duration per call.

    The coroutine check happens once at decoration time, so the returned
    wrapper matches the flavor (async/sync) of the wrapped function.
    """
    if asyncio.iscoroutinefunction(f):
        async def wrapper(*args, **kwargs):
            started = time.time()
            outcome = await f(*args, **kwargs)
            print('used {}'.format(time.time() - started))
            return outcome
    else:
        def wrapper(*args, **kwargs):
            started = time.time()
            outcome = f(*args, **kwargs)
            print('used {}'.format(time.time() - started))
            return outcome
    return wrapper
There are a lot of decorators — retry, logging, etc. — that would all be written this way, which is a bit ugly, right?
While there are no real problems with repeating the same code in specialized decorators,
here is how I would approach a refactoring.
I will use a class decorator that accepts a pre-call function and a post-call function,
both of which will be called with an instance of the decorator.
The result of the pre-call function will be saved to an attribute of the decorator.
This is necessary for the special timing case where a delta needs to be computed.
I guess there may be other examples that may require the return value of a pre-call function execution.
I also save the result of the decorated function executed to the result attribute of the decorator instance. This allows the post call function to read this value for logging.
Here is an example implementation:
import asyncio
class WrapAll(object):
    """Decorate sync or async callables with pre/post hooks.

    ``pre`` and ``post`` are called with the decorator instance: the pre
    hook's result is saved on ``self.pre_val`` (e.g. a start timestamp)
    and the wrapped call's result on ``self.result`` so the post hook can
    read both.

    NOTE(review): state lives on the decorator instance, so a single
    WrapAll instance is not safe to share across concurrent calls.
    """

    def __init__(self, pre=lambda _: None, post=lambda _: None):
        self.pre = lambda: pre(self)
        self.pre_val = None   # result of the pre hook
        self.result = None    # result of the most recent wrapped call
        self.post = lambda: post(self)

    def __call__(self, fn):
        if asyncio.iscoroutinefunction(fn):
            async def wrap(*args, **kwargs):
                self.pre_val = self.pre()
                # Fix: original used fn(*args, *kwargs), which unpacks only
                # the keyword-argument KEYS; ** forwards them properly.
                self.result = await fn(*args, **kwargs)
                self.post()
                return self.result
        else:
            def wrap(*args, **kwargs):
                self.pre_val = self.pre()
                self.result = fn(*args, **kwargs)  # was *kwargs (keys only)
                self.post()
                return self.result
        return wrap
Timer Example
import asyncio
import time
# Hook pair for WrapAll: stash the start time, then report the delta.
timer = {
    'pre': lambda self: time.time(),
    'post': lambda self: print('used {}'.format(time.time() - self.pre_val)),
}
# Fix: decorators were mangled to comments (`#WrapAll`); restore the `@`.
@WrapAll(**timer)
def add(x, y):
    return x + y

@WrapAll(**timer)
async def async_add(x, y):
    future = asyncio.Future()
    future.set_result(x + y)
    await future
    return future.result()
Running sync adder
>>> add(3, 4)
used 4.76837158203125e-06
7
Running async adder
>>> loop = asyncio.get_event_loop()
>>> task = asyncio.ensure_future(async_add(3, 4))
>>> try:
... loop.run_until_complete(task)
... except RuntimeError:
... pass
used 2.193450927734375e-05
Logging Example
import asyncio
import logging
# Bare-message log format, plus a WrapAll post hook that logs the result.
FORMAT = '%(message)s'
logging.basicConfig(format=FORMAT)

logger = {
    'post': lambda self: logging.warning('subtracting {}'.format(self.result)),
}
# Fix: decorators were mangled to comments (`#WrapAll`); restore the `@`.
@WrapAll(**logger)
def sub(x, y):
    return x - y

@WrapAll(**logger)
async def async_sub(x, y):
    future = asyncio.Future()
    future.set_result(x - y)
    await future
    return future.result()
Running sync subtractor:
>>> sub(5, 6)
subtracting -1
Running async subtractor:
>>> loop = asyncio.get_event_loop()
>>> task = asyncio.ensure_future(async_sub(5, 6))
>>> try:
... loop.run_until_complete(task)
... except RuntimeError:
... pass
subtracting -1
I wonder if there is a python hackish way to achieve the following:
I found myself using an assert like structure in my views a lot:
def view(request):
    """Illustrates the repeated guard-clause pattern the question wants to
    simplify: each failed precondition short-circuits with a 500 response."""
    if not condition:
        return HttpResponseServerError("error")
    if not condition2:
        return HttpResponseServerError("error2")
    [...]
    return HttpResponse("OK!")
So I thought about using an assert like function:
def view(request):
    """Attempted assert-style validation (deliberately broken, per the question).

    NOTE: verify() calls err() but ignores its return value, so the
    HttpResponseServerError never propagates out of view() — that is
    exactly the flaw the question asks about.
    """
    def err(msg=None):
        text = msg if msg else "Illegal Parameters"
        payload = json.dumps({"msg": text})
        return HttpResponseServerError(payload)

    def verify(exp, msg=None):
        if not exp:
            err(msg)  # return value discarded — the flaw in question

    verify(condition, "error")
    verify(condition2, "error2")
    return HttpResponse("OK")
Obviously, this does not work, as the result of the error function is never returned. Furthermore, I would also need to return the Response all the way to the view function and run return verify(), which will make my code prevent from execution of course.
One possible solution would be a decorator that either returns an error or the view function after all asserts went through. However, I would like to prevent that, as I also need some of the values I am establishing (imagine parsing one number after another and then having to pass a list of numbers).
Another solution I could think of is to actually do use a decorator and make my function a generator, yielding the result of verify. The decorator is a loop over that generator and keeps going until a response is yielded.
But in this post I am really looking for a more hackish way to let the nested function return a response instead of the parent function, and therefore prevent further execution.
I will post my yield "solution" in a separate answer so you can get the picture :)
What about an exception, and a nice decorator to catch it:
class AssertError(Exception):
    """Raised inside a view to abort with a server-error response."""
    pass


def assertLike(view):
    """Decorator: turn an AssertError raised inside *view* into an HTTP 500."""
    def wrap(request, *args, **kwargs):
        try:
            # Fix: the original return line ended with a stray ':' — a
            # syntax error.
            return view(request, *args, **kwargs)
        except AssertError:
            return HttpResponseServerError(...)
    return wrap
# Fix: the decorator was mangled to a comment (`#assertLike`); the `....`
# placeholders are normalized to valid `...` ellipses.
@assertLike
def createTask(request):
    import json
    ...
    if not exp:
        raise AssertError()
    ...
    return HttpResponse("Ok")
Here I present the generator based solution:
def assertLike(view):
    """Drive a generator-based view: return its first truthy yielded value.

    Falsy yields (e.g. a passing verify()) are skipped; if nothing truthy
    is yielded the wrapper implicitly returns None.
    """
    def wrap(request, *args, **kwargs):
        for candidate in view(request, *args, **kwargs):
            if candidate:
                return candidate
    return wrap
# Fix: all three decorators were mangled to comments; restore the `@`.
@other_django_views
@another_django_view
@assertLike
def createTask(request):
    import json

    def err(msg=None):
        msg = msg if msg else "Illegal Parameters"
        resp = {"msg": msg}
        resp = json.dumps(resp)
        return HttpResponseServerError(resp)

    def verify(exp, msg=None):
        if not exp:
            return err(msg)

    # only react to ajax requests
    yield verify(True, "This is not an error")
    yield verify(False, "Here it should stop!")
    yield HttpResponse("This is the final response!")