Pass self to decorator object - Python

I have the following decorator object:

class wait_for_page_load(object):
    def __init__(self, driver, time_to_wait=20):
        self.driver = driver
        self.time_to_wait = time_to_wait

    def __call__(self, function):
        @functools.wraps(function)
        def wrapper(*args):
            old_page = self.driver.find_element_by_tag_name('html')
            function(*args)
            WebDriverWait(self.driver, self.time_to_wait).until(staleness_of(old_page))
        return wrapper
I want to apply it to a method of another class, like this:

class VehiclePage(object):
    def __init__(self, driver):
        self.driver = driver

    @wait_for_page_load(self.driver)
    def open(self):
        self.driver.get('%s/vehicles/' % BASE_URL)

This gives me an error. Is there a way to pass self.driver to the decorator?

You don't have to pass self to a decorator object. If the decorator returns a function, then that function will get access to self when it is called, e.g.
def pass_value(function):
    def wrapper(self):
        function(self, self.value)
    return wrapper

class Printer(object):
    def __init__(self, value):
        self.value = value

    @pass_value
    def print_(self, v):
        print(v)

Printer("blah").print_()  # prints: blah
The one problem with this method is that it requires self to implement a specific interface (such as having an attribute called driver), rather than passing the driver to the decorator directly.
Your decorator would become:
def wait_for_page_load(time_to_wait=20):
    def decorator(function):
        @functools.wraps(function)
        def wrapper(self, *args):
            old_page = self.driver.find_element_by_tag_name('html')
            function(self, *args)
            WebDriverWait(self.driver, time_to_wait).until(staleness_of(old_page))
        return wrapper
    return decorator
Used as:
@wait_for_page_load()  # parentheses are needed
def open(self):
    ...

Short answer: No there is not.
Long answer:
The driver attribute is set when you instantiate the class. The decorator, however, is run when the class is interpreted, that is, when the interpreter first reads it while loading the module. At that point you don't have any instance ready. To do this kind of thing you will have to refactor your code.
Also, even if that worked, you would end up using a single instance of your decorator class for all your objects. Probably not what you expected.
A simple workaround, however, could be to apply the decorator in __init__. Though not very elegant, it would work if you really need the decorator:

def __init__(self, driver):
    self.driver = driver
    self.open = wait_for_page_load(self.driver)(self.open)
But then I believe you may need to bind the wrapper to the instance yourself by calling types.MethodType - honestly, it's probably better to just reorganize your code.
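For what it's worth, here is a minimal sketch of that workaround, assuming the wait_for_page_load decorator class from the question is in scope. Because self.open is already a bound method at the moment it is wrapped, the wrapper happens to work here without types.MethodType:

class VehiclePage(object):
    def __init__(self, driver):
        self.driver = driver
        # self.open is a bound method at this point, so the wrapper
        # produced by wait_for_page_load can call it without passing self
        self.open = wait_for_page_load(self.driver)(self.open)

    def open(self):
        self.driver.get('%s/vehicles/' % BASE_URL)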

Python decorator for accessing class variable

I have a decorator to enforce a time limit; if the function's execution exceeds the limit, an error is raised.
def timeout(seconds=10):
    def decorator(func):
        # a timeout decorator
        ...
    return decorator
And I want to build a class, using the constructor to pass the time limit into the class.
class myClass:
    def __init__(self, time_limit):
        self.time_limit = time_limit

    @timeout(self.time_limit)
    def do_something(self):
        # do something
        ...
But this does not work.
  File "XX.py", line YY, in myClass
    @timeout(self.time_limit)
NameError: name 'self' is not defined
What's the correct way to implement this?
self.time_limit is only available when a method on an instance of your class is called.
The decorator statements prefixing the methods, on the other hand, are run when the class body is parsed.
However, the inner part of your decorator, if it will always be applied to methods, will get self as its first parameter - and there you can simply make use of any instance attribute:
def timeout(**decorator_parms):
    def decorator(func):
        def wrapper(self, *args, **kwargs):
            time_limit = self.time_limit
            now = time.time()
            result = func(self, *args, **kwargs)
            # code to check timeout
            ...
            return result
        return wrapper
    return decorator
If your decorator is expected to work with time limits other than always self.time_limit, you could pass a string or other sentinel object and check for it inside the innermost wrapper with a simple if statement: when the timeout is that sentinel, use the instance attribute; otherwise use the value passed in. A sketch of this follows below.
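A minimal sketch of that sentinel approach (the USE_INSTANCE_LIMIT name is invented here for illustration, and, matching the answer's pattern, the check happens after the call rather than interrupting it, which a real timeout would do):

import time

USE_INSTANCE_LIMIT = object()  # hypothetical sentinel

def timeout(time_limit=USE_INSTANCE_LIMIT):
    def decorator(func):
        def wrapper(self, *args, **kwargs):
            # fall back to the instance attribute when the sentinel is passed
            limit = self.time_limit if time_limit is USE_INSTANCE_LIMIT else time_limit
            start = time.time()
            result = func(self, *args, **kwargs)
            if time.time() - start > limit:
                raise TimeoutError('exceeded %.1f seconds' % limit)
            return result
        return wrapper
    return decorator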
You can also decorate a method in the constructor:
class myClass:
    def __init__(self, time_limit):
        self.do_something = timeout(time_limit)(self.do_something)

    def do_something(self):
        # do something
        ...

Python decorator to determine order of execution of methods

I have a basic class Framework with 3 methods that can be overridden by the user: initialize, handle_event and finalize.
These methods are executed by the method run:
class Framework(object):
    def initialize(self):
        pass

    def handle_event(self, event):
        pass

    def finalize(self):
        pass

    def run(self):
        self.initialize()
        for event in range(10):
            self.handle_event(event)
        self.finalize()
I would like to create 3 decorators: on_initialize, on_event and on_finalize so that I could write such a class:
class MyFramework(Framework):
    # The following methods will be executed once, in this order
    @on_initialize(precedence=-1)
    def say_hi(self):
        print('say_hi')

    @on_initialize(precedence=0)
    def initialize(self):
        print('initialize')

    @on_initialize(precedence=1)
    def start_processing_events(self):
        print('start_processing_events')

    # The following methods will be executed on each event, in this order
    @on_event(precedence=-1)
    def before_handle_event(self, event):
        print('before_handle_event:', event)

    @on_event(precedence=0)
    def handle_event(self, event):
        print('handle_event:', event)

    @on_event(precedence=1)
    def after_handle_event(self, event):
        print('after_handle_event:', event)

    # The following methods will be executed once at the end, in this order
    @on_finalize(precedence=-1)
    def before_finalize(self):
        print('before_finalize')

    @on_finalize(precedence=0)
    def finalize(self):
        print('finalize')

    @on_finalize(precedence=1)
    def after_finalize(self):
        print('after_finalize')

if __name__ == '__main__':
    f = MyFramework()
    f.run()
These decorators determine the order of execution of the optional methods the user may add to the class. I think that, by default, initialize, handle_event and finalize should take precedence=0. The user could then add any method with the right decorator and know when it gets executed in the simulation run.
I have honestly no idea how to get started with this problem. Any help to push me in the right direction will be very welcome! Many thanks.
If you are using Python 3.6, this is a case that can take advantage of the new __init_subclass__ method: it is called on the superclass whenever a subclass of it is created. Without Python 3.6 you have to resort to a metaclass.
The decorator itself can just mark each method with the needed data.
def on_initialize(precedence=0):
    def marker(func):
        func._initializer = precedence
        return func
    return marker

def on_event(precedence=0):
    def marker(func):
        func._event_handler = precedence
        return func
    return marker

def on_finalize(precedence=0):
    def marker(func):
        func._finalizer = precedence
        return func
    return marker
class Framework:
    def __init_subclass__(cls, *args, **kw):
        super().__init_subclass__(*args, **kw)
        handlers = dict(_initializer=[], _event_handler=[], _finalizer=[])
        for name, method in cls.__dict__.items():
            for handler_type in handlers:
                if hasattr(method, handler_type):
                    handlers[handler_type].append((getattr(method, handler_type), name))
        for handler_type in handlers:
            setattr(cls, handler_type,
                    [handler[1] for handler in sorted(handlers[handler_type])])

    def initialize(self):
        for method_name in self._initializer:
            getattr(self, method_name)()

    def handle_event(self, event):
        for method_name in self._event_handler:
            getattr(self, method_name)(event)

    def finalize(self):
        for method_name in self._finalizer:
            getattr(self, method_name)()

    def run(self):
        self.initialize()
        for event in range(10):
            self.handle_event(event)
        self.finalize()
If you have a complex class hierarchy that should inherit the action methods properly, you will have to merge the lists in the handlers dictionary with the ones in the superclass (get the superclass as cls.__mro__[1]) before applying them as class attributes.
Also, if you are using any Python < 3.6, you will need to move the logic in __init_subclass__ to a metaclass: just put the code as it is in the __init__ method of a metaclass (adjusting the incoming parameters and the super call as appropriate) and it should work just the same. A sketch follows below.
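A minimal sketch of that metaclass variant, assuming the handler-collection logic is moved over verbatim (FrameworkMeta is a name invented here):

class FrameworkMeta(type):
    def __init__(cls, name, bases, namespace):
        super(FrameworkMeta, cls).__init__(name, bases, namespace)
        handlers = dict(_initializer=[], _event_handler=[], _finalizer=[])
        for attr_name, method in namespace.items():
            for handler_type in handlers:
                if hasattr(method, handler_type):
                    handlers[handler_type].append((getattr(method, handler_type), attr_name))
        for handler_type in handlers:
            setattr(cls, handler_type,
                    [handler[1] for handler in sorted(handlers[handler_type])])

# Python 3: class Framework(metaclass=FrameworkMeta): ...
# Python 2: class Framework(object): __metaclass__ = FrameworkMeta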
My idea is to use class-based decorators, which are simple and give intermediate context to share between the decorated functions. The decorator would look like this (I am using Python 3.5):
class OnInitialize:
    methods = {}

    def __init__(self, precedence):
        self.precedence = precedence

    def __call__(self, func):
        self.methods[self.precedence] = func
        def wrapper(*a, **kw):
            for precedence in sorted(self.methods.keys()):
                self.methods[precedence](*a, **kw)
        return wrapper
On decoration, __init__ is executed first and stores the precedence value for further use. Then __call__ is executed, which just adds the target function to the methods dictionary. (Note that __call__ and the methods structure could be customized to allow multiple methods with the same precedence.)
The target class and methods, on the other hand, would look like this:
class Target:
    @OnInitialize(precedence=-1)
    def say_hi(self):
        print("say_hi")

    @OnInitialize(precedence=0)
    def initialize(self):
        print("initialize")

    @OnInitialize(precedence=1)
    def start_processing_events(self):
        print("start_processing_events")
This ensures that, if any one of those methods is called, all of the decorated methods are called, in the given order:
target = Target()
target.initialize()
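Assuming the sketch above works as written, calling target.initialize() should print:

say_hi
initialize
start_processing_events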
Hope it helps - please comment below if you are interested in something else.

Proxy class can't call methods on child

I'm writing a Python class to wrap/decorate/enhance another class from a package called petl, a framework for ETL (data movement) workflows. Due to design constraints I can't just subclass it; every method call has to be sent through my own class so I can control what kind of objects are being passed back. So in principle this is a proxy class, but I'm having some trouble using existing answers/recipes out there. This is what my code looks like:
from functools import partial

class PetlTable(object):
    """not really how we construct petl tables, but for illustrative purposes"""
    def hello(name):
        print('Hello, {}!'.format(name))

class DatumTable(object):
    def __init__(self, petl_tbl):
        self.petl_tbl = petl_tbl

    def __getattr__(self, name):
        """this returns a partial referencing the child method"""
        petl_attr = getattr(self.petl_tbl, name, None)
        if petl_attr and callable(petl_attr):
            return partial(self.call_petl_method, func=petl_attr)
        raise NotImplementedError('Not implemented')

    def call_petl_method(self, func, *args, **kwargs):
        func(*args, **kwargs)
Then I try to instantiate a table and call something:
# create a petl table
pt = PetlTable()

# wrap it with our own class
dt = DatumTable(pt)

# try to run the petl method
dt.hello('world')
This gives a TypeError: call_petl_method() got multiple values for argument 'func'.
This only happens with positional arguments; kwargs seem to be fine. I'm pretty sure it has to do with self not being passed in, but I'm not sure what the solution is. Can anyone think of what I'm doing wrong, or a better solution altogether?
This seems to be a common issue when mixing positional and keyword args:
TypeError: got multiple values for argument
To get around it, I took the positional arg func out of call_petl_method and put it in a kwarg that's unlikely to overlap with the kwargs of the child function. A little hacky, but it works.
I ended up writing a Proxy class to do all this generically:
class Proxy(object):
    def __init__(self, child):
        self.child = child

    def __getattr__(self, name):
        child_attr = getattr(self.child, name)
        return partial(self.call_child_method, __child_fn__=child_attr)

    @classmethod
    def call_child_method(cls, *args, **kwargs):
        """
        This calls a method on the child object and wraps the response as an
        object of its own class.

        Takes a kwarg `__child_fn__` which points to a method on the child
        object.

        Note: this can't take any positional args or they get clobbered by the
        keyword args we're trying to pass to the child. See:
        https://stackoverflow.com/questions/21764770/typeerror-got-multiple-values-for-argument
        """
        # get child method
        fn = kwargs.pop('__child_fn__')

        # call the child method
        r = fn(*args, **kwargs)

        # wrap the response as an object of the same class
        r_wrapped = cls(r)
        return r_wrapped
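A hypothetical usage sketch (the Child class is invented here for illustration; note that, unlike the PetlTable above, its method takes self, which is what the bound-method lookup in __getattr__ expects):

class Child(object):
    def greet(self, name):
        print('Hello, {}!'.format(name))
        return name

p = Proxy(Child())
r = p.greet('world')  # prints "Hello, world!"; r is a Proxy wrapping 'world'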
This will also solve the problem. It doesn't use partial at all.
class PetlTable(object):
    """not really how we construct petl tables, but for illustrative purposes"""
    def hello(name):
        print('Hello, {}!'.format(name))

class DatumTable(object):
    def __init__(self, petl_tbl):
        self.petl_tbl = petl_tbl

    def __getattr__(self, name):
        """Looks up the named attribute in the class of the petl_tbl object."""
        petl_attr = self.petl_tbl.__class__.__dict__.get(name, None)
        if petl_attr and callable(petl_attr):
            return petl_attr
        raise NotImplementedError('Not implemented')

if __name__ == '__main__':
    # create a petl table
    pt = PetlTable()

    # wrap it with our own class
    dt = DatumTable(pt)

    # try to run the petl method
    dt.hello('world')  # -> Hello, world!

Python initializing a memoizing decorator with settings

I have a memoizer decorator class in a library, as such:
class memoizer(object):
    def __init__(self, f):
        "some code here"

    def __call__(self, *args, **kwargs):
        "some code here"
When I use it for functions in the library, I use @memoizer. However, I'd like to have the client (i.e. the programmer using the library) initialize the memoization class from outside the library with some arguments, so that the settings hold for all uses of the decorator in the client program. In particular, this memoization class saves the results to a file, and I want the client to be able to specify how much memory the files can take. Is this possible?
You can achieve this using a decorator factory:
class DecoratorFactory(object):
    def __init__(self, value):
        self._value = value

    def decorator(self, function):
        def wrapper(*args, **kwargs):
            print(self._value)
            return function(*args, **kwargs)
        return wrapper

factory = DecoratorFactory("shared between all decorators")

@factory.decorator
def dummy1():
    print("dummy1")

@factory.decorator
def dummy2():
    print("dummy2")

# prints:
# shared between all decorators
# dummy1
dummy1()

# prints:
# shared between all decorators
# dummy2
dummy2()
If you don't like factories, you can create global variables within some module and set them before using your decorators (not a nice solution; IMO the factory is cleaner).
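Applied to the question, a minimal sketch of a factory-based memoizer (MemoizerFactory, max_file_size, and the in-memory cache standing in for the file-backed one are all assumptions for illustration):

class MemoizerFactory(object):
    def __init__(self, max_file_size=None):
        # shared setting the client configures once
        self.max_file_size = max_file_size

    def memoize(self, f):
        cache = {}
        def wrapper(*args):
            if args not in cache:
                cache[args] = f(*args)
                # a real implementation would persist the cache to a file
                # here, honoring self.max_file_size
            return cache[args]
        return wrapper

# the client configures the factory once; the library then uses it
factory = MemoizerFactory(max_file_size=10 * 1024 * 1024)  # 10 MB

@factory.memoize
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)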

Decorating an instance method and calling it from the decorator

I am using the nose test generators feature to run the same test with different contexts. Since it requires the following boilerplate for each test:
class TestSample(TestBase):
    def test_sample(self):
        for context in contexts:
            yield self.check_sample, context

    def check_sample(self, context):
        """The real test logic is implemented here"""
        pass
I decided to write the following decorator:
def with_contexts(contexts=None):
    if contexts is None:
        contexts = ['twitter', 'linkedin', 'facebook']
    def decorator(f):
        @wraps(f)
        def wrapper(self, *args, **kwargs):
            for context in contexts:
                yield f, self, context  # the line which causes the error
        return wrapper
    return decorator
The decorator is used in the following manner:
class TestSample(TestBase):
    @with_contexts()
    def test_sample(self, context):
        """The real test logic is implemented here"""
        var1 = self.some_valid_attribute
When the tests are executed, an error is thrown specifying that the attribute being accessed is not available. However, if I change the yielding line to the following, it works fine:

yield getattr(self, f.__name__), context
I understand that the above snippet creates a bound method, whereas in the first one self is passed manually to the function. However, as far as my understanding goes, the first snippet should work fine too. I would appreciate it if anyone could clarify the issue.
The title of the question refers to calling instance methods in decorators in general, but I have kept the description specific to my context.
You can use functools.partial to tie the wrapped function to self, just like a method would be:
from functools import partial, wraps

def decorator(f):
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        for context in contexts:
            yield partial(f, self), context
    return wrapper
Now you are yielding partials instead, which, when called as yieldedvalue(context), will call f(self, context).
As far as I can tell, some things don't fit together. First, your decorator is defined as:
def with_contexts(contexts=None):
    if contexts is None:
        contexts = ['twitter', 'linkedin', 'facebook']
    def decorator(f):
        @wraps(f)
        def wrapper(self, *args, **kwargs):
            for context in contexts:
                yield f, self, context  # the line which causes the error
        return wrapper
    return decorator
but you use it like:

@with_contexts
def test_sample(self, context):
    """The real test logic is implemented here"""
    var1 = self.some_valid_attribute
This is wrong: it calls with_contexts(test_sample), but you need with_contexts()(test_sample). So do:
@with_contexts()
def test_sample(self, context):
    """The real test logic is implemented here"""
    var1 = self.some_valid_attribute
even if you don't provide the contexts argument.
Second, you decorate the wrong function: your usage shows that the test function should yield the check function for each context. The function you want to wrap does the job of the check function, but you have to name it after the test function.
Applying self to a method can be done with partial, as Martijn writes, but it can just as well be done the way Python does it under the hood: with

method.__get__(self, None)

or, maybe better,

method.__get__(self, type(self))

you can achieve the same. (Maybe your original version works as well, yielding the function to be called together with the arguments to use; it was not clear to me that this is the way it works.)
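For illustration, a minimal sketch of the wrapper rewritten to yield bound methods via __get__ rather than partial (same assumptions as the snippets above):

def decorator(f):
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        for context in contexts:
            # f.__get__ produces a bound method, so calling
            # yieldedvalue(context) runs f(self, context)
            yield f.__get__(self, type(self)), context
    return wrapper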
