I need to be able to dynamically invoke a method on a class that accepts various parameters based on the string name and a dictionary of variables. I know how to find the signature with the inspect module, and I can get the method with the getattr, but I do not know how to assign the parameters in the correct order to invoke it in a purely dynamic way.
class MyClass():
    def call_me(a, b, *args, foo='bar', **kwargs):
        # NOTE(review): no `self` parameter, as in the question's snippet;
        # when accessed through an instance, the bound call makes the
        # instance land in `a` — presumably unintended, verify with caller.
        print('Hey, I got called!')
# Command descriptor: a method name plus a flat dict of every candidate
# argument (positional, varargs list, keyword-only, and **kwargs extras).
command = {
'action':'call_me',
'parameters':{
'a': 'Apple',
'b': 'Banana',
'args':['one','two','three','four'],
'foo':'spam',
'clowns':'bad',
'chickens':'good'
}
}
# Resolve the method by name on an instance, giving a bound method.
me = MyClass()
action = getattr(me,command['action'])
... now what?
I need to be able to dynamically call this function as if this code were used, without any foreknowledge of the actual parameters for the method:
# The hand-written equivalent of the desired dynamic dispatch: each named
# parameter is pulled out of the dict explicitly...
a = command['parameters']['a']
b = command['parameters']['b']
args = command['parameters']['args']
foo = command['parameters']['foo']
# ...and the leftovers are collected for **kwargs by hand.
kwargs = {
'clowns': command['parameters']['clowns'],
'chickens':command['parameters']['chickens']
}
value = action(a, b, *args, foo=foo, **kwargs)
Surely there is a good pythonic way to do this.
Edit: Fixed getattr to call instance of MyClass instead of MyClass directly.
This is the best way I have found so far to capture every possible combination of normal args, *args, keyword args and **kwargs without getting any errors:
import inspect
class MyClass():
    """Stub methods covering every combination of plain positionals,
    *varargs, keyword-only arguments (with and without defaults) and
    **kwargs, used to exercise dynamic_invoke below."""

    def a(self):
        pass

    def b(self, foo):
        pass

    def c(self, foo, *extras):
        pass

    def d(self, foo, food='spam'):
        pass

    def e(self, foo, **kwargs):
        pass

    def f(self, foo, *extras, food='spam'):
        pass

    def g(self, foo, *extras, **kwargs):
        pass

    def h(self, foo, *extras, food='spam', **kwargs):
        pass

    def i(self, *extras):
        pass

    def j(self, *extras, food='spam'):
        pass

    def k(self, *extras, **kwargs):
        pass

    def l(self, *extras, food='spam', **kwargs):
        pass

    def m(self, food='spam'):
        pass

    def n(self, food='spam', **kwargs):
        pass

    def o(self, **kwargs):
        pass
def dynamic_invoke(obj, name, parameters):
    """Look up method ``name`` on ``obj`` and call it, mapping the flat
    ``parameters`` dict onto the method's signature.

    Mapping rules:
      * positional parameters are taken from ``parameters`` by name; if a
        name is absent its declared default is used, and a missing
        *required* argument raises KeyError (the original contract);
      * a ``*varargs`` parameter is filled (unpacked as positionals) from
        ``parameters[<varargs name>]`` when that key exists;
      * keyword-only parameters are forwarded when present;
      * all unused keys go to ``**kwargs`` if the method declares one.

    Returns whatever the invoked method returns.
    """
    action = getattr(obj, name)
    # inspect.signature already excludes `self` on bound methods (and also
    # handles builtins/partials), unlike getfullargspec + args[1:].
    sig = inspect.signature(action)
    used = set()
    pos_args = []
    kw_args = {}
    varkw_name = None
    for pname, param in sig.parameters.items():
        kind = param.kind
        if kind in (param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD):
            if pname in parameters:
                pos_args.append(parameters[pname])
                used.add(pname)
            elif param.default is not param.empty:
                pos_args.append(param.default)
            else:
                # Missing required argument: surface as KeyError, matching
                # the original implementation's behavior.
                raise KeyError(pname)
        elif kind is param.VAR_POSITIONAL:
            # Optional now: the original crashed when the varargs key was
            # absent from `parameters`.
            if pname in parameters:
                pos_args.extend(parameters[pname])
                used.add(pname)
        elif kind is param.KEYWORD_ONLY:
            if pname in parameters:
                kw_args[pname] = parameters[pname]
                used.add(pname)
        else:  # VAR_KEYWORD
            varkw_name = pname
    # Pass remaining parameters through **kwargs, if the method accepts it.
    if varkw_name is not None:
        for key, value in parameters.items():
            if key not in used:
                kw_args[key] = value
    return action(*pos_args, **kw_args)
me = MyClass()
params = {
    'foo': 'bar',
    'extras': ['one', 'two', 'three', 'four'],
    'food': 'eggs',
    'parrot': 'blue'
}

# Exercise every signature variant (methods a through o) with one params
# dict; each call must succeed without raising.
for method_name in 'abcdefghijklmno':
    dynamic_invoke(me, method_name, params)
print('done!')
Try like this:
action = getattr(me,command['action'])
# Unpack the whole parameter dict as keyword arguments; anything that is
# not a declared parameter is scooped up by **kwargs. NOTE(review): the
# 'args' key arrives as one keyword (a list), not as *args positionals —
# confirm that is the intended behavior for call_me.
action(**{'a': 'Apple',
'b': 'Banana',
'args':['one','two','three','four'],
'foo':'spam',
'clowns':'bad',
'chickens':'good'
})
Related
I have some classes FooA and FooB which are basically a collection of "static" methods. They operate on data - let's say it is an DataItem object:
# Base class with common behavior
class FooBase:
    @classmethod  # restored: Markdown stripped '@classmethod' to '#classmethod'
    def method1(cls, arg, data: DataItem):
        # res = ...
        return res  # NOTE(review): 'res' is elided pseudocode from the question

    @classmethod  # restored from '#classmethod'
    def method2(cls, arg1, arg2, data: DataItem):
        # res = ... # using method1
        return res
# specialized classes
class FooA(FooBase):
    # define extra methods
    pass


class FooB(FooBase):
    # define extra methods
    pass


# usage 1: as "static methods"
res = FooA.method1(arg, data)
res2 = FooB.method2(args, data)
Now, I'd like to use these classes as attributes of a "managing" class (MyApp) which also has access to a datasource and should implicitly supply DataItems to the static methods of FooA and FooB. Moreover, the datasource supplies a list of DataItem objects.
# usage 2: as part of an "App" class
# here, the "data" argument should be supplied implicitly by MyApp
# also: MyApp contains a list of "data" objects
class MyApp:
    """Owns the datasource that should implicitly feed DataItems to the
    Foo static methods."""

    def __init__(self, datasrc):
        self.datasrc = datasrc

    # this could be a generator
    def get_data(self, key) -> List[DataItem]:
        return self.datasrc.get_data(key)

    # FooA, FooB as class / instance level attributes, descriptors, ???
# usage
my_app = MyApp("datasrc")
res_list = my_app.foo_a.method1(arg) # foo_a is a FooA obj, "data" arg is supplied automatically
# optionally, but not necessarily call as a static attribute:
res = MyApp.foo_a.method1(arg, data)  # data: DataItem; same as FooA.method1(arg, data)
I have tried different things but found no satisfactory solution.
So... I am not sure it can be done in a nice way; I thought about it, and every approach has serious drawbacks. One of the problems is that we actually want a method that returns either a list or a single item depending on the input parameters, which is bad.
One way could be to store datasrc in FooBase, but it violates the SRP:
class FooBase:
    """Variant that stores the datasource on the class itself (noted in the
    answer text as an SRP violation)."""

    def __init__(self, datasrc):
        # Deliberately assigns to the class, not the instance, so the
        # classmethod below can reach it.
        FooBase.datasrc = datasrc

    @classmethod  # restored: Markdown stripped '@classmethod' to '#classmethod'
    def method1(cls, arg, data=None):
        # No data supplied -> fan out over every item in the datasource.
        if data is None:
            return [cls.method1(arg, d) for d in cls.datasrc]
        return data
Or use isinstance
# NOTE(review): the '#classmethod' marker here was most likely a
# Markdown-mangled '@classmethod'; as written this is a plain function
# that receives the owning class explicitly as `cls`.
def method1(cls, arg, data):
    # Single items pass straight through; lists fan out recursively.
    if not isinstance(data, list):
        return data
    return [cls.method1(arg, d) for d in data]
But it forces us to adjust every method (which could be done with decorator or metaclass).
Another way could be use some intermediate layer:
def decorator(datasrc):
    """Wrap a callable so one call fans out into one call per item of
    ``datasrc``, each receiving that item as the ``data`` keyword."""
    def wrapper(foo):
        def fanned_out(*args, **kwargs):
            # We could catch TypeError here to serve case when data is passed
            return [foo(*args, **kwargs, data=item) for item in datasrc]
        return fanned_out
    return wrapper


class FooAdapter:
    """Expose every public callable of ``foo_cls`` as an attribute whose
    calls are fanned out over ``datasrc`` by ``decorator``."""

    def __init__(self, datasrc, foo_cls):
        self.datasrc = datasrc
        for attr_name in dir(foo_cls):
            if attr_name.startswith("__"):
                continue
            candidate = getattr(foo_cls, attr_name)
            if callable(candidate):
                # Keep the callable's own __name__, exactly as the
                # original list-then-setattr loop did.
                setattr(self, candidate.__name__, decorator(datasrc)(candidate))
class MyApp:
    """Wires one datasource to adapter-wrapped FooA and FooB method sets."""

    def __init__(self, datasrc):
        self.datasrc = datasrc
        self.foo_a = FooAdapter(datasrc, FooA)
        self.foo_b = FooAdapter(datasrc, FooB)
But solution with dynamically added functions breaks IDE support.
The cleanest solution imo could be to have Enum for Foo methods and Enum for Foo classes, then you could write code in MyApp
def get_bulk(m: MethodEnum, f: FooEnum, *args):
    # NOTE(review): sketch from the answer text — relies on
    # enum_to_cls_mapping and self.datasrc existing in the enclosing
    # MyApp scope; not runnable as written.
    return [getattr(enum_to_cls_mapping[f], m)(*args, data=d) for d in self.datasrc]
For example:
# Restored decorator: Markdown stripped '@attrs' to '#attrs'
# (requires: from attr import attrs, attrib).
@attrs
class Foo:
    a = attrib()


# The question notes this call throws, because Foo accepts no 'b' argument.
f = Foo(a=1, b=2)
Code above will throw an error because class Foo doesn't have b attr. But I want to discard passed b value as if I just called f = Foo(a=1). In my use case I have dynamic dict (which I want to transform into attr-class) and I simply do not need some of the keys.
I think I figured out a more elegant solution which allows you to take advantage of the features of attrs while also tweaking the __init__ logic. See attrs documentation for more info.
# Restored decorator: Markdown stripped '@attr.s(...)' to '#attr.s(...)'.
@attr.s(auto_attribs=True, auto_detect=True)
class Foo():
    a: int
    optional: int = 3

    def __init__(self, **kwargs):
        # Drop any keyword args that do not match a declared attribute,
        # then delegate to the attrs-generated __attrs_init__.
        filtered = {
            attribute.name: kwargs[attribute.name]
            for attribute in self.__attrs_attrs__
            if attribute.name in kwargs
        }
        self.__attrs_init__(**filtered)
The code above allows you to specify extraneous keyword args. It also allows for optional args.
>>> Foo(a = 1, b = 2)
Foo(a=1, optional=3)
attrs detects the explicit init method (due to auto_detect=True) and still creates the init function, but calls it __attrs_init__. This allows you do define your own init function to do preprocessing and then call __attrs_init__ when you are done.
>>> import inspect
>>> print(inspect.getsource(Foo.__attrs_init__))
def __attrs_init__(self, a, optional=attr_dict['optional'].default):
self.a = a
self.optional = optional
class FromDictMixin:
    """Mixin giving attrs classes a from_dict constructor that ignores
    unknown keys in the input dict.

    NOTE(review): a declared attribute *missing* from ``data`` still
    raises KeyError, as in the original.
    """

    @classmethod  # restored: Markdown stripped '@classmethod' to '#classmethod'
    def from_dict(cls, data: dict):
        # Keep only the keys that correspond to declared attrs attributes.
        return cls(**{
            a.name: data[a.name]
            for a in cls.__attrs_attrs__
        })
# Restored decorator: Markdown stripped '@attrs' to '#attrs'.
@attrs
class Foo(FromDictMixin):
    a = attrib()
It works, but it looks kinda ugly. I was hoping that the attrs lib had an out-of-the-box solution.
This seems to be more of a question of serialization/deserialization/validation, and attrs is quite strict on its arguments for multiple reasons. One of them is typing (as in types, not pressing keys :)) and the other is robustness/debuggability. Ignoring arguments that you might have just misspelt can lead to very frustrating moments. It's better to move this kind of stuff into a separate layer.
You can find some possible tools for that in https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs.
I had to do something similar but I didn't want to write a custom __init__ method for every class. So I created a decorator where it would attach an __init__ method to the class before instantiation then wrap in attrs.define decorator.
This is just an example but does what you want.
import attrs
def define(cls):
    """Decorator: attach a kwargs-filtering __init__ to ``cls``, then wrap
    it with attrs.define lazily on first instantiation."""
    def __init__(cls, **kwargs):
        # Keep only the keyword args matching declared attrs attributes,
        # then hand off to the attrs-generated initializer.
        filtered = {}
        for attr in cls.__attrs_attrs__:
            if attr.name in kwargs:
                filtered[attr.name] = kwargs[attr.name]
        cls.__attrs_init__(**filtered)

    def wrapper(*args, **kwargs):
        nonlocal cls
        # First call: install the custom __init__ and apply attrs.define,
        # rebinding cls to the attrs-built class.
        cls.__init__ = __init__
        cls = attrs.define(cls)
        return cls(*args, **kwargs)

    return wrapper
# Restored decorator: Markdown stripped '@define' to '#define'.
@define
class Booking:
    id: int
    id_hash: str


# 'foo' is silently discarded by the filtering __init__ installed by define.
booking = {"id": 1, "id_hash": "a3H33lk", "foo": "bar"}
b = Booking(**booking)
print(b)
# Booking(id=1, id_hash='a3H33lk')
I have a class that has multiple methods and I want to store all of the available methods that would be easily accessible in example would be something like this
class Methods:
    """Toy method collection; methods_dict maps short names to results."""

    def foo(self, a):
        return f'hello {a}'

    def bar(self, b):
        return f'hello {b}'

    def methods_dict(self, var, **kwargs):
        # NOTE: both methods run eagerly the moment this dict is built —
        # exactly the behavior the question complains about.
        return {
            'me': self.foo(var),
            'be': self.bar(var),
        }
But on runtime my methods_dict() method will execute both of the methods inside of it's dictionary.
On the one hand I'm planning to store only strings in there so it's really easily accessible; on the other hand I probably would not need to access all of the available methods at once.
Any suggestions ?
I am planning to use those methods as follows
class InheritMethods(Methods):
    """Remembers a method key and dispatches to it via methods_dict."""

    def __init__(self, method_name):
        self.method_name = method_name

    def add_to_list(self, input):
        arr = []
        # Fixed two defects from the original: the closing parenthesis was
        # missing (SyntaxError), and the dict builder is named
        # methods_dict, not method_dicts.
        arr.append(self.methods_dict(input)[self.method_name])
        return arr
To clear things up, I am gonna call specific method based on input name, so basically input == method_name
I could do conditional statements like if input == 'foo': do somethings.., but if i end up having a lot of methods, my code is going to be a mess, so i assume(!) that would not be a great idea
I think you can get what you want with the following. Your exact usecase is still not clear. Please respond if I am heading in the wrong direction.
Using self.__getattribute__() you can get a function by name. Of course you would have to catch exceptions etc.
class Methods:
    """Minimal method collection used by the getattr-based answers below."""

    def foo(self, a):
        return f'hello {a}'

    def bar(self, b):
        return f'hello {b}'
class InheritMethods(Methods):
    """Dispatches to any inherited method by name via getattr."""

    def __init__(self, method_name):
        self.method_name = method_name

    def add_to_list(self, method_name, input):
        # Look the method up by name, call it, and wrap the result.
        return [getattr(self, method_name)(input)]
class InheritSingleMethod(Methods):
    """Binds one chosen method as add_to_list at construction time."""

    def __init__(self, method_name):
        # Fixed: the original called self.getattr(self, method_name), but
        # instances have no 'getattr' attribute — use the builtin.
        self.add_to_list = getattr(self, method_name)
Output
# Any method version
inherit_methods = InheritMethods('a') # < no use for that argument right?
inherit_methods.add_to_list('foo', 'laurens')
> ['hello laurens']
# Single method version
inherit_single_method = InheritSingleMethod('foo')
inherit_single_method.add_to_list('laurens')
> 'hello laurens'
If all you want to do is access a method of Methods given the name in a str, use getattr:
# Read the method name from stdin, then fetch the bound method by name
# and call it immediately with one argument.
name = input()
m = Methods()
getattr(m, name)("bob")
I would like to extend a larger set of classes to accept an additional keyword argument. The set of classes all share a common interface which I cannot change.
I have tried it like this:
class Base:
    """Simple three-field value holder with defaulted constructor args."""

    def __init__(self, a=0, b=1, c=2):
        self.a, self.b, self.c = a, b, c
def extended_base(Kls):
    """Build a subclass of ``Kls`` in which every field listed in
    ``additional_fields`` gains a shadow attribute ``<field>2`` (taken from
    the constructor kwargs, defaulting to None) plus a ``get_<field>2()``
    accessor that falls back to the base field when the shadow is unset.

    Fixes versus the original: the __init__ popped the *base* field name
    instead of '<field>2', so the extension kwarg leaked into the parent
    constructor; super(Kls, self) skipped Kls itself; and the getters were
    hard-coded rather than generated from additional_fields.
    """
    additional_fields = ['a', 'b']

    def my_init(self, *args, **kwargs):
        # Pop the '<field>2' keywords before delegating so Kls.__init__
        # never sees them.
        for field in additional_fields:
            shadow = '{}2'.format(field)
            setattr(self, shadow, kwargs.pop(shadow, None))
        # Call the parent initializer explicitly: zero-arg super() has no
        # __class__ cell here, and super(Kls, self) would skip Kls itself.
        Kls.__init__(self, *args, **kwargs)

    def make_getter(field):
        # Factory so each getter binds its own field (avoids the
        # late-binding-closure pitfall).
        def getter(self):
            return getattr(self, '{}2'.format(field)) or getattr(self, field)
        getter.__name__ = 'get_{}2'.format(field)
        return getter

    namespace = {'__init__': my_init}
    for field in additional_fields:
        namespace['get_{}2'.format(field)] = make_getter(field)
    return type('My{}'.format(Kls.__name__), (Kls,), namespace)
# Build the extended class and instantiate it: a=3 is the base value,
# a2=9 the extension value; the get_*2 accessors prefer the '2' variant.
test = extended_base(Base)(a=3, a2=9)
test.a
test.a2
test.get_a2()
test.get_b2()
As you see, I just want to extend some properties of the base class, and I would love to be able to do this by simply specifying the properties that should be extended in the additional_fields of the function.
I have two problems. The first is that I do not know how to call the parent's __init__ method — the two ways shown above both give errors. The second is that I do not know how to define an anonymous(?) function that would, for example, do the same for the property c if I add c to the additional_fields list.
I'm trying to create a partial function but with dynamic arguments that are stored as class attributes and changed accordingly. Something like the following code:
from functools import partial
def foo(*args, msg):
    """Print the required keyword-only ``msg``; positional args are ignored."""
    print(msg)
class Bar:
    """Freezes msg into a partial at construction time — which is exactly
    why later changes to self.msg are not picked up."""

    def __init__(self, msg):
        self.msg = msg
        self.functions = {'foo': partial(foo, msg=self.msg)}

    def foo_method(self, *args):
        return self.functions['foo'](*args)
b =Bar('1')
b.foo_method()
# Rebinding b.msg does not touch the partial created in __init__, so the
# next call still prints '1' — the behavior the question complains about.
b.msg = '2'
b.foo_method()
Only, of course, both statements will print '1' as the partial object fixes the arguments. The only alternative I found was changing the attribute to a property and manually changing the partial attributes with the setter:
class Bar:
    """Keeps the stored partial's ``msg`` keyword in sync with self.msg by
    routing assignments through a property setter."""

    def __init__(self, msg):
        self._msg = None
        self.functions = dict()
        self.functions['foo'] = partial(foo)
        self.msg = msg  # runs the setter below, priming the partial's kwargs

    def foo_method(self, *args):
        return self.functions['foo'](*args)

    @property  # restored: Markdown stripped '@property' to '#property'
    def msg(self):
        return self._msg

    @msg.setter  # restored from '#msg.setter'
    def msg(self, msg):
        self._msg = msg
        # partial.keywords is the live dict of frozen kwargs; updating it
        # changes what the stored partial passes on the next call.
        self.functions['foo'].keywords['msg'] = msg
I would like to know if there is a more "pythonic" / efficient way to do this, since I really don't need to use properties except for this workaround.
You can use lambda instead of partial for deferred (or often referred to as "lazy") evaluation of the arguments, so that self.msg is not evaluated until the function is called:
class Bar:
    """Stores a lambda whose msg argument is re-read from self.msg on every
    call (lazy evaluation instead of partial's frozen binding)."""

    def __init__(self, msg):
        self.msg = msg
        self.functions = {'foo': lambda *args: foo(*args, msg=self.msg)}

    def foo_method(self, *args):
        return self.functions['foo'](*args)
What's wrong with just storing a reference to the passed function and constructing the call on the spot? i.e.:
class Bar:
    """Keeps a plain reference to foo and assembles the call on the spot."""

    def __init__(self, msg):
        self.msg = msg
        # a reference to foo, not needed here but used as an example
        self.foo = foo

    def foo_method(self, *args):
        # or just: foo(*args, msg=self.msg)
        return self.foo(*args, msg=self.msg)
A thing that seems to be working as well is defining the function to work with a class attribute.
You can then define a function using partial with one of the arguments being the class.
# Fixed: the class was defined as 'myContex' but used as 'myContext'
# everywhere below, which raised NameError.
class myContext:
    """Mutable holder whose class attribute ``a`` is read at call time."""
    a = 5


def my_fun(context, b, c):
    print(context.a, b, c)


my_fun_partial = partial(my_fun, myContext)
my_fun_partial(4, 7)
# Output: 5 4 7

myContext.a = 50
# NOTE: rebuilding the partial is not actually required — the existing one
# reads the current myContext.a — but the original example rebuilds it.
my_fun_partial = partial(my_fun, myContext)
my_fun_partial(4, 7)
# Output: 50, 4, 7
The simplest possible way I can think of would be just constructing a dict and passing it double-starred to the function to unpack.
Something like:
def some_func(msg, some_arg=None):
    """Demo target for dict-unpacking calls; prints a fixed greeting."""
    print("Hello world")  # ignore the msg for now
# Build the keyword-argument dict incrementally...
call_args = {}
call_args['some_arg'] = 2 # single field
call_args.update({'msg': 1, 'stuff': [2,3,4]}) # multiple at once
# ...then unpack it. As the surrounding text explains, this deliberately
# raises TypeError: 'stuff' is not a parameter of some_func.
some_func(**call_args)
Right now, some_func will throw a TypeError because we've passed more args than the function takes. You could work around this either by having the function accept **kwargs in the signature, trimming down the arguments you don't expect or some other approach.
For now, continuing the last session:
# Rebuild the dict with only valid keyword arguments this time.
call_args = {'msg': 'abc'} # let's get rid of those extra args
some_func(**call_args) # => prints 'Hello world'