I have written a Python class for parsing a specialized text format.
class Parser(object):

    def __init__(self):
        # Initialize parser instance
        ...

    def parseFile(self, filename):
        pass

    def modifyParser(self, *args, **kwargs):
        pass

    # Classmethod has the same name as the instance method - this does not work.
    @classmethod
    def parseFile(cls, filename):
        parser = Parser()
        return parser.parseFile(filename)
As indicated, the parser can be modified with the modifyParser method, but in most cases I will just use the Parser instance as it comes from Parser.__init__(). I would like to be able to do this:
# Parse file using 'custom' parser:
parser = Parser()
parser.modifyParser(...)
result = parser.parseFile("file.input")

# Parse using the default parser - do not explicitly instantiate an object:
result = Parser.parseFile("file.input")
This requires that the parseFile() method can be called both as an instance method (with a self) and as a classmethod. Is this possible? Bad form?
If I were you, I'd offer two distinct functions:
mymodule.Parser().parseFile() (instance method), and
mymodule.parseFile() (module-level function that uses the default instance).
This is what happens for example with the standard json module, where you have json.JSONDecoder().decode() and json.loads(). Offering two distinct functions makes the code less ambiguous, more explicit and more predictable (in my opinion).
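For illustration, here is a minimal sketch of that two-function layout; mymodule, the default-instance name and the Parser internals are placeholders, not from the question:

# mymodule.py - sketch of the json-style layout
class Parser(object):
    def parseFile(self, filename):
        # instance method: parse with this (possibly modified) parser
        ...

_default_parser = Parser()

def parseFile(filename):
    """Module-level convenience function that uses a default Parser instance."""
    return _default_parser.parseFile(filename)

Callers then choose between mymodule.parseFile("file.input") and mymodule.Parser().parseFile("file.input").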
However, yes: what you want to do is possible. You have to implement your own descriptor using __get__. Here's an example:
from functools import partial

class class_and_instance_method(object):
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, type=None):
        # bind the first argument to the instance if there is one, else to the class
        first_arg = obj if obj is not None else type
        return partial(self.func, first_arg)

class Parser(object):
    @class_and_instance_method
    def parseFile(self):
        if isinstance(self, type):
            print('using default parser')
        else:
            print('using the current instance')
>>> Parser.parseFile()
using default parser
>>> p = Parser()
>>> p.parseFile()
using the current instance
You'll have to use two separate names. In Python, due to its dynamic nature, there is no overloading as in C++, where one function name can be defined several times with different argument lists.
When you write def in your script, you tell Python "bind the following object (a function object) to this name". So in your code you simply rebind the name to the classmethod, and your instance-method function object is lost.
Solution: use different names for the instance method and the class method.
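A minimal sketch of that approach; the classmethod name parseFileDefault is my own suggestion, not from the question:

class Parser(object):
    def parseFile(self, filename):
        # instance method: parse with this (possibly modified) parser
        ...

    @classmethod
    def parseFileDefault(cls, filename):
        # different name: parse with a default, unmodified parser
        return cls().parseFile(filename)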
Often in Python it is helpful to make use of duck typing. For instance, imagine I have an object spam whose prompt attribute controls the prompt text in my application. Normally, I would say something like:
spam.prompt = "fixed"
for a fixed prompt. However, a dynamic prompt can also be achieved: while I can't change the spam class to use a function as the prompt, thanks to duck typing (the underlying spam object calls str() on the attribute), I can create a dynamic prompt like so:
class MyPrompt:
    def __str__(self):
        return eggs.get_user_name() + ">"

spam.prompt = MyPrompt()
This principle could be extended to make any attribute dynamic, for instance:
class MyEnabled:
    def __bool__(self):
        return eggs.is_logged_in()

spam.enabled = MyEnabled()
Sometimes though, it would be more succinct to have this inline, i.e.
spam.prompt = lambda: eggs.get_user_name() + ">"
spam.enabled = eggs.is_logged_in
These of course don't work, because neither the __str__ of the lambda nor the __bool__ of the function returns the actual value of the call.
I feel like a solution for this should be simple, am I missing something, or do I need to wrap my function in a class every time?
What you want are computed attributes. Python's support for computed attributes is the descriptor protocol, which has a generic implementation as the builtin property type.
Now the trick is that, as documented (cf. the link above), descriptors only work when they are class attributes. Your code snippet is incomplete as it doesn't contain the definition of the spam object, but I assume it's a class instance, so you cannot just do spam.something = property(...), as the descriptor protocol wouldn't then be invoked on the property().
The solution here is the good old "strategy" design pattern: use properties (or custom descriptors, but if you only have a couple of such attributes the builtin property will work just fine) that delegate to a "strategy" function:
def default_prompt_strategy(obj):
    return "fixed"

def default_enabled_strategy(obj):
    return False

class Spam(object):
    def __init__(self, prompt_strategy=default_prompt_strategy, enabled_strategy=default_enabled_strategy):
        self.prompt = prompt_strategy
        self.enabled = enabled_strategy

    @property
    def prompt(self):
        return self._prompt_strategy(self)

    @prompt.setter
    def prompt(self, value):
        if not callable(value):
            raise TypeError("PromptStrategy must be a callable")
        self._prompt_strategy = value

    @property
    def enabled(self):
        return self._enabled_strategy(self)

    @enabled.setter
    def enabled(self, value):
        if not callable(value):
            raise TypeError("EnabledStrategy must be a callable")
        self._enabled_strategy = value

class Eggs(object):
    def is_logged_in(self):
        return True

    def get_user_name(self):
        return "DeadParrot"

eggs = Eggs()
spam = Spam(enabled_strategy=lambda obj: eggs.is_logged_in())
spam.prompt = lambda obj: "{}>".format(eggs.get_user_name())
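With that in place, reading the attributes invokes the strategy callables; a quick check, assuming the Eggs stub above:

print(spam.prompt)    # -> DeadParrot>
print(spam.enabled)   # -> True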
Disclaimer:
Before reading this post know that I am trying to do something that is unconventional in python. Since "Don't do x" is not an answer to "how do I do x?" let's assume there is a very good reason to do this, even though in most cases it would not be good practice.
The Question
Given I have a class that is dynamically created by applying a decorator to a function, how would I go about pickling an instance of said class?
For example, to set this up it might look like this:
import inspect
from functools import wraps

class BaseClass:
    pass

def _make_method(func):
    """ decorator for adding self as first argument to function """
    @wraps(func)
    def decorator(self, *args, **kwargs):
        return func(*args, **kwargs)

    # set signature to include self
    sig = inspect.signature(decorator)
    par = inspect.Parameter('self', inspect.Parameter.POSITIONAL_OR_KEYWORD)
    new_params = tuple([par] + list(sig.parameters.values()))
    new_sig = sig.replace(parameters=new_params,
                          return_annotation=sig.return_annotation)
    decorator.__signature__ = new_sig
    return decorator

def snake2camel(snake_str):
    """ convert a snake_string to a CamelString """
    return "".join(x.title() for x in snake_str.split('_'))

def make_class(func):
    """ dynamically create a class setting the call method to function """
    name = snake2camel(func.__name__)  # get the name of the new class
    method = _make_method(func)
    cls = type(name, (BaseClass,), {'__call__': method})
    return cls()

@make_class
def something(arg):
    return arg
Now something is an instance of the dynamically created class Something.
type(something) # -> __main__.Something
isinstance(something, BaseClass) # -> True
which works fine, but when I try to pickle it (or use the multiprocessing module which uses pickle under the hood):
import pickle
pickle.dumps(something) # -> raises
it throws this error:
# PicklingError: Can't pickle <class '__main__.Something'>: attribute lookup Something on __main__ failed
So I thought I could redefine BaseClass to use a __reduce__ method like so:

class BaseClass:
    def __reduce__(self):
        return make_class, (self.__call__.__func__,)
but then it throws the dreaded "not the same object" error:
# PicklingError: Can't pickle <function something at 0x7fe124cb2d08>: it's not the same object as __main__.something
How can I make this work without bringing in dependencies? I need to be able to pickle the something object so I can use it with the ProcessPoolExecutor class from the concurrent.futures module in python 3.6, so simply using dill or cloudpickle is probably not an option here.
I'm writing a Python class to wrap/decorate/enhance another class from a package called petl, a framework for ETL (data movement) workflows. Due to design constraints I can't just subclass it; every method call has to be sent through my own class so I can control what kind of objects are being passed back. So in principle this is a proxy class, but I'm having some trouble using existing answers/recipes out there. This is what my code looks like:
from functools import partial

class PetlTable(object):
    """not really how we construct petl tables, but for illustrative purposes"""
    def hello(name):
        print('Hello, {}!'.format(name))

class DatumTable(object):
    def __init__(self, petl_tbl):
        self.petl_tbl = petl_tbl

    def __getattr__(self, name):
        """this returns a partial referencing the child method"""
        petl_attr = getattr(self.petl_tbl, name, None)
        if petl_attr and callable(petl_attr):
            return partial(self.call_petl_method, func=petl_attr)
        raise NotImplementedError('Not implemented')

    def call_petl_method(self, func, *args, **kwargs):
        func(*args, **kwargs)
Then I try to instantiate a table and call something:
# create a petl table
pt = PetlTable()
# wrap it with our own class
dt = DatumTable(pt)
# try to run the petl method
dt.hello('world')
This gives a TypeError: call_petl_method() got multiple values for argument 'func'.
This only happens with positional arguments; kwargs seem to be fine. I'm pretty sure it has to do with self not being passed in, but I'm not sure what the solution is. Can anyone think of what I'm doing wrong, or a better solution altogether?
This seems to be a common issue with mixing positional and keyword args:
TypeError: got multiple values for argument
To get around it, I took the positional arg func out of call_petl_method and put it in a kwarg that's unlikely to overlap with the kwargs of the child function. A little hacky, but it works.
I ended up writing a Proxy class to do all this generically:
from functools import partial

class Proxy(object):
    def __init__(self, child):
        self.child = child

    def __getattr__(self, name):
        child_attr = getattr(self.child, name)
        return partial(self.call_child_method, __child_fn__=child_attr)

    @classmethod
    def call_child_method(cls, *args, **kwargs):
        """
        This calls a method on the child object and wraps the response as an
        object of its own class.

        Takes a kwarg `__child_fn__` which points to a method on the child
        object.

        Note: this can't take any positional args or they get clobbered by the
        keyword args we're trying to pass to the child. See:
        https://stackoverflow.com/questions/21764770/typeerror-got-multiple-values-for-argument
        """
        # get child method
        fn = kwargs.pop('__child_fn__')
        # call the child method
        r = fn(*args, **kwargs)
        # wrap the response as an object of the same class
        r_wrapped = cls(r)
        return r_wrapped
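For illustration, a quick usage sketch with a made-up child class (Child and greet are not from petl); every return value comes back wrapped in a Proxy:

class Child(object):
    def greet(self, name):
        return 'Hello, {}!'.format(name)

p = Proxy(Child())
wrapped = p.greet('world')   # calls Child.greet and wraps the result
print(type(wrapped))         # -> <class '__main__.Proxy'>
print(wrapped.child)         # -> Hello, world!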
This will also solve the problem. It doesn't use partial at all.
class PetlTable(object):
    """not really how we construct petl tables, but for illustrative purposes"""
    def hello(name):
        print('Hello, {}!'.format(name))

class DatumTable(object):
    def __init__(self, petl_tbl):
        self.petl_tbl = petl_tbl

    def __getattr__(self, name):
        """Looks up the named attribute in the class of the petl_tbl object."""
        petl_attr = self.petl_tbl.__class__.__dict__.get(name, None)
        if petl_attr and callable(petl_attr):
            return petl_attr
        raise NotImplementedError('Not implemented')
if __name__ == '__main__':
    # create a petl table
    pt = PetlTable()

    # wrap it with our own class
    dt = DatumTable(pt)

    # try to run the petl method
    dt.hello('world')  # -> Hello, world!
The scenario is that I'm using an arg parser to get a command-line argument auth_application.
The auth_application argument can have many values, for example:
cheese
eggs
noodles
pizza
These values are related to a programmable class.
I'd like a way to name the class, possibly using a decorator.
So I can say
if auth_application is Cheese.__name__:
    return Cheese()
Currently I maintain a tuple of auth_application names and have to expose that to my arg parser class as well as import the classes I need.
Any way to make this better? Is there a decorator for classes to name them?
I'm looking for a python 2.7 solution, but a python 3 solution might be useful to know.
Easy peasy.
class command(object):
    map = {}

    def __init__(self, commandname):
        self.name = commandname

    def __call__(self, cls):
        command.map[self.name] = cls
        return cls

    class NullCommand(object):
        pass

@command('cheese')
class Cheese(object):
    pass

@command('eggs')
class Eggs(object):
    pass

def func(auth_application):
    return command.map.get(auth_application, command.NullCommand)()
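A quick usage sketch of that registry; an unregistered name falls back to NullCommand:

obj = func('cheese')        # -> a Cheese instance
fallback = func('noodles')  # 'noodles' is not registered -> a command.NullCommand instance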
You can just keep a single list of all of your "allowed classes" and iterate over that to find the class being referred to from the command line.
allow_classes = [Cheese, Eggs, Noodles, Pizza]

for cls in allow_classes:
    if auth_application.lower() == cls.__name__.lower():
        return cls()
Absolutely you can! You need to understand class attributes.
class NamedClass(object):
    name = "Default"

class Cheese(NamedClass):
    name = "Cheese"

print(Cheese.name)
> Cheese
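Tying that back to the question, one way to pick the class from the command-line value could be to scan the NamedClass subclasses; the lookup helper below is my own sketch, not part of the answer:

def lookup(auth_application):
    # compare the class-level name attribute against the command-line value
    for cls in NamedClass.__subclasses__():
        if cls.name.lower() == auth_application.lower():
            return cls()
    return None

lookup("cheese")  # -> a Cheese instance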
You can use the standard inspect library to get the real class names, without having to augment your classes with any extra data - and this works for any class, in any module, even if you don't have the source code.
For instance, to list all the classes defined in mymodule:
import mymodule
import inspect

for name, obj in inspect.getmembers(mymodule, inspect.isclass):
    print name
The obj variable is a real class object, which you can use to create an instance, access class methods, etc.
To get the definition of a class by its name string, you can write a simple search function:
import mymodule
import inspect

def find_class(name):
    """Find a named class in mymodule"""
    for this_name, _cls_ in inspect.getmembers(mymodule, inspect.isclass):
        if this_name == name:
            return _cls_
    return None
....
# Create an instance of the class named in auth_application
find_class(auth_application)(args, kwargs)
NB: Code snippets not tested
Recently, I faced a problem which was similar to this question:
Accessing the class that owns a decorated method from the decorator
My rep was not high enough to comment there, so I am starting a new question to address some improvements to the answer to that problem.
This is what I needed:
def original_decorator(func):
# need to access class here
# for eg, to append the func itself to class variable "a", to register func
# or say, append func's default arg values to class variable "a"
return func
class A(object):
a=[]
#classmethod
#original_decorator
def some_method(self,a=5):
''' hello'''
print "Calling some_method"
#original_decorator
def some_method_2(self):
''' hello again'''
print "Calling some_method_2"
The solution needs to work with both class methods and instance methods, and the method returned from the decorator should work and behave just the same way as if it were undecorated, i.e. the method signature should be preserved.
The accepted answer for that question returned a class from the decorator; the metaclass then identified that specific class and performed the "class-accessing" operations.
The answer described itself as a rough solution, but it clearly had a few caveats:
The decorator returned a class, and that class was not callable. Obviously, it can be made callable easily, but the returned value is still a class: it merely behaves the same way when called, while its properties and behavior are different. Essentially, it would not work the same way as the undecorated method.
It forced the decorator to return a class of a custom type, and all the "class-accessing" code was put directly inside the metaclass. That is simply not nice; writing the decorator should not require touching the metaclass directly.
I have tried to come up with a better solution, documented in the answer.
Here is the solution.
It uses a decorator (which would work on "class-accessing" decorators) and a metaclass, which would fulfill all my requirements and address the problems of that answer. Probably the best advantage is that the "class-accessing" decorators can just access the class, without even touching the metaclass.
# Using metaclass and decorator to allow class access during class creation time
# No method defined within the class should have "__process_meta" as an arg
# Potential problems: uses closures; function.func_globals is read-only

from functools import partial
import inspect

class meta(type):
    def __new__(cls, name, base, clsdict):
        temp_cls = type.__new__(cls, name, base, clsdict)
        methods = inspect.getmembers(temp_cls, inspect.ismethod)
        for (method_name, method_obj) in methods:
            tmp_spec = inspect.getargspec(method_obj)
            if "__process_meta" in tmp_spec.args:
                what_to_do, main_func = tmp_spec.defaults[:-1]
                f = method_obj.im_func
                # swap the real function's attributes in before handing it to the decorator
                f.func_code, f.func_defaults, f.func_dict, f.func_doc, f.func_name = \
                    main_func.func_code, main_func.func_defaults, main_func.func_dict, main_func.func_doc, main_func.func_name
                mod_func = what_to_do(temp_cls, f)
                # copy back whatever the "class-accessing" decorator returned
                f.func_code, f.func_defaults, f.func_dict, f.func_doc, f.func_name = \
                    mod_func.func_code, mod_func.func_defaults, mod_func.func_dict, mod_func.func_doc, mod_func.func_name
        return temp_cls

def do_it(what_to_do, main_func=None):
    if main_func is None:
        return partial(do_it, what_to_do)

    def whatever(what_to_do=what_to_do, main_func=main_func, __process_meta=True):
        pass

    return whatever

def original_classmethod_decorator(cls, func):
    # cls => class of the method
    # appends default arg values to class variable "a"
    func_defaults = inspect.getargspec(func).defaults
    cls.a.append(func_defaults)
    func.__doc__ = "This is a class method"
    print "Calling original classmethod decorator"
    return func

def original_method_decorator(cls, func):
    func_defaults = inspect.getargspec(func).defaults
    cls.a.append(func_defaults)
    func.__doc__ = "This is an instance method"  # Can change func properties
    print "Calling original method decorator"
    return func

class A(object):
    __metaclass__ = meta
    a = []

    @classmethod
    @do_it(original_classmethod_decorator)
    def some_method(cls, x=1):
        ''' hello'''
        print "Calling original class method"

    @do_it(original_method_decorator)
    def some_method_2(self, y=2):
        ''' hello again'''
        print "Calling original method"

# signature preserved
print(inspect.getargspec(A.some_method))
print(inspect.getargspec(A.some_method_2))
Open to suggestions on whether this approach has any caveats.