How can I use click.MultiCommand together with commands defined as classmethods?
I'm trying to setup a plugin system for converters where users of the library can provide their own converters. For this system I'm setting up a CLI like the following:
$ myproj convert {converter} INPUT OUTPUT {ARGS}
Each converter is its own class and all inherit from BaseConverter. In the BaseConverter is the most simple Click command which only takes input and output.
For the converters that don't need more than that, they don't have to override that method. If a converter needs more than that, or needs to provide additional documentation, then it needs to be overridden.
With the code below, I get the following error when trying to use the cli:
TypeError: cli() missing 1 required positional argument: 'cls'
conversion/
├── __init__.py
└── backends/
├── __init__.py
├── base.py
├── bar.py
├── baz.py
└── foo.py
# cli.py
from pydoc import locate
import click
from proj.conversion import AVAILABLE_CONVERTERS
class ConversionCLI(click.MultiCommand):
    """Multi-command exposing one subcommand per registered converter."""

    def list_commands(self, ctx):
        # Alphabetical order keeps --help output stable.
        return sorted(AVAILABLE_CONVERTERS)

    def get_command(self, ctx, name):
        # Resolve the converter class's dotted path and return its
        # `cli` click command object.
        dotted_path = AVAILABLE_CONVERTERS[name] + '.cli'
        return locate(dotted_path)
# NOTE(review): "#click.command" in the paste is a mangled "@click.command"
# decorator (a common Markdown rendering artifact).
@click.command(cls=ConversionCLI)
def convert():
    """Convert files using specified converter"""
    pass
# conversion/__init__.py
from django.conf import settings
AVAILABLE_CONVERTERS = {
'bar': 'conversion.backends.bar.BarConverter',
'baz': 'conversion.backends.baz.BazConverter',
'foo': 'conversion.backends.foo.FooConverter',
}
extra_converters = getattr(settings, 'CONVERTERS', {})
AVAILABLE_CONVERTERS.update(extra_converters)
# conversion/backends/base.py
import click
class BaseConverter():
    """Base class for converters.

    Subclasses override convert(); those that need extra CLI arguments
    (or docs) also override cli(). NOTE(review): this is the question's
    code (with its mangled "#" decorators restored to "@") — it is the
    arrangement the question reports as raising
    `TypeError: cli() missing 1 required positional argument: 'cls'`.
    """

    @classmethod
    def convert(cls, infile, outfile):
        raise NotImplementedError

    @classmethod
    @click.command()
    @click.argument('infile')
    @click.argument('outfile')
    def cli(cls, infile, outfile):
        return cls.convert(infile, outfile)
# conversion/backends/bar.py
from proj.conversion.base import BaseConverter
class BarConverter(BaseConverter):
    """Converter that only needs the default INPUT/OUTPUT arguments."""

    @classmethod
    def convert(cls, infile, outfile):
        # do stuff
        ...  # placeholder body so the snippet is syntactically valid
# conversion/backends/foo.py
import click
from proj.conversion.base import BaseConverter
class FooConverter(BaseConverter):
    """Converter needing an extra CLI argument beyond INPUT/OUTPUT,
    so it overrides cli() as well as convert()."""

    @classmethod
    def convert(cls, infile, outfile, extra_arg):
        # do stuff
        ...  # placeholder body so the snippet is syntactically valid

    @classmethod
    @click.command()
    @click.argument('infile')
    @click.argument('outfile')
    @click.argument('extra-arg')
    def cli(cls, infile, outfile, extra_arg):
        return cls.convert(infile, outfile, extra_arg)
To use a classmethod as a click command, you need to be able to populate the cls parameter when invoking the command. That can be done with a custom click.Command class like:
Custom Class:
import click
class ClsMethodClickCommand(click.Command):
    """click.Command that injects the owning class as the ``cls`` param.

    The class is kept in a one-element list so the same mutable cell is
    shared regardless of how the command object is reached.
    """

    def __init__(self, *args, **kwargs):
        self._cls = [None]
        super().__init__(*args, **kwargs)

    def main(self, *args, **kwargs):
        # The first positional argument is the owning class: remember it
        # and strip it before handing off to click.
        cls_arg, *cli_args = args
        self._cls[0] = cls_arg
        return super().main(*cli_args, **kwargs)

    def invoke(self, ctx):
        # Supply the remembered class to the callback as ``cls``.
        ctx.params['cls'] = self._cls[0]
        return super().invoke(ctx)
Using the Custom Class:
class MyClassWithAClickCommand:
    @classmethod
    @click.command(cls=ClsMethodClickCommand)
    ....
    def cli(cls, ....):
        ....
And then in the click.Multicommand class you need to populate the _cls attribute since the command.main is not called in this case:
def get_command(self, ctx, name):
    """Look up a subcommand and bind it to its defining class.

    Overrides click.MultiCommand.get_command(); MultiCommand invokes
    subcommands without going through Command.main(), so the _cls cell
    must be populated here instead.
    """
    # this is hard coded in this example but presumably
    # would be done with a lookup via name
    cmd = MyClassWithAClickCommand.cli
    # Tell the click command which class it is associated with
    cmd._cls[0] = MyClassWithAClickCommand
    return cmd
How does this work?
This works because click is a well designed OO framework. The @click.command() decorator usually instantiates a click.Command object but allows this behavior to be overridden with the cls parameter. So it is a relatively easy matter to inherit from click.Command in our own class and override the desired methods.
In this case, we override click.Command.invoke() and then add the containing class to the ctx.params dict as cls before invoking the command handler.
Test Code:
class MyClassWithAClickCommand:
    """Demo class whose ``cli`` classmethod is a click command.

    NOTE(review): the "#classmethod" / "#click..." lines in the paste
    are mangled "@" decorators; restored here.
    """

    @classmethod
    @click.command(cls=ClsMethodClickCommand)
    @click.argument('arg')
    def cli(cls, arg):
        click.echo('cls: {}'.format(cls.__name__))
        click.echo('cli: {}'.format(arg))
class ConversionCLI(click.MultiCommand):
    """MultiCommand exposing a single hard-coded demo subcommand."""

    def list_commands(self, ctx):
        return ['converter_x']

    def get_command(self, ctx, name):
        # MultiCommand bypasses Command.main(), so the class must be
        # associated with the command here via the shared _cls cell.
        cmd = MyClassWithAClickCommand.cli
        cmd._cls[0] = MyClassWithAClickCommand
        return cmd
# NOTE(review): "#click.command" in the paste is a mangled "@click.command".
@click.command(cls=ConversionCLI)
def convert():
    """Convert files using specified converter"""
if __name__ == "__main__":
    # Scripted walk-through of representative invocations for the demo.
    commands = (
        'converter_x an_arg',
        'converter_x --help',
        'converter_x',
        '--help',
        '',
    )
    import sys, time
    time.sleep(1)
    print('Click Version: {}'.format(click.__version__))
    print('Python Version: {}'.format(sys.version))
    for cmd in commands:
        try:
            time.sleep(0.1)
            print('-----------')
            print('> ' + cmd)
            time.sleep(0.1)
            convert(cmd.split())
        except BaseException as exc:
            # click exits each run via SystemExit; swallow clean exits
            # and click's own errors so the loop can demo every command,
            # but re-raise anything unexpected.
            if str(exc) != '0' and \
                    not isinstance(exc, (click.ClickException, SystemExit)):
                raise
Results:
Click Version: 6.7
Python Version: 3.6.3 (v3.6.3:2c5fed8, Oct 3 2017, 18:11:49) [MSC v.1900 64 bit (AMD64)]
-----------
> converter_x an_arg
class: MyClassWithAClickCommand
cli: an_arg
-----------
> converter_x --help
Usage: test.py converter_x [OPTIONS] ARG
Options:
--help Show this message and exit.
-----------
> converter_x
Usage: test.py converter_x [OPTIONS] ARG
Error: Missing argument "arg".
-----------
> --help
Usage: test.py [OPTIONS] COMMAND [ARGS]...
Convert files using specified converter
Options:
--help Show this message and exit.
Commands:
converter_x
-----------
>
Usage: test.py [OPTIONS] COMMAND [ARGS]...
Convert files using specified converter
Options:
--help Show this message and exit.
Commands:
converter_x
@Stephen Rauch's answer was inspirational to me, but didn't quite do it either. While I think it's a more complete answer for the OP, it doesn't quite work the way I wanted insofar as making any arbitrary click command/group work like a classmethod.
It also doesn't work with click's built-in decorators like click.pass_context and click.pass_obj; that's not so much its fault though as that click is really not designed to work on methods--it always passes the context as the first argument, even if that argument should be self/cls.
My use case was I already have a base class for microservices that provides a base CLI for starting them (which generally isn't overridden). But the individual services subclass the base class, so the default main() method on the class is a classmethod, and instantiates an instance of the given subclass.
I wanted to convert the CLI to using click (to make it more extensible) while keeping the existing class structure, but click is really not particularly designed to work with OOP, though this can be worked around.
import click
import types
from functools import update_wrapper, partial
class BoundCommandMixin:
    """Proxy over a click Command whose callback is bound to a class/instance.

    Mimics a bound method: the wrapped command's callback becomes a
    ``types.MethodType`` bound to *binding*, and all other attribute
    access is delegated to the wrapped command.
    """

    def __init__(self, binding, wrapped, with_context=False, context_arg='ctx'):
        self.__self__ = binding
        self.__wrapped__ = wrapped
        # Bind the callback so `binding` is passed as cls/self.
        callback = types.MethodType(wrapped.callback, binding)
        if with_context:
            def context_wrapper(*args, **kwargs):
                ctx = obj = click.get_current_context()
                if isinstance(with_context, type):
                    # Behave like click.make_pass_decorator(with_context).
                    obj = ctx.find_object(with_context)
                # Pass the context/object as a keyword argument instead of
                # the first positional one (which is taken by cls/self).
                kwargs[context_arg] = obj
                return ctx.invoke(callback, *args, **kwargs)
            self.callback = update_wrapper(context_wrapper, callback)
        else:
            self.callback = callback

    def __repr__(self):
        wrapped = self.__wrapped__
        return f'<bound {wrapped.__class__.__name__} {wrapped.name} of {self.__self__!r}>'

    def __getattr__(self, attr):
        # Delegate everything else (params, invoke, ...) to the command.
        return getattr(self.__wrapped__, attr)
class classcommand:
    """Descriptor/decorator making a click Command act like a classmethod.

    Usage: ``@classcommand`` or ``@classcommand(with_context=...)`` above a
    ``@click.command()``-decorated function. Attribute access returns a
    BoundCommand bound to the owning class.
    """

    # Cache of dynamically built Bound<CommandType> classes, keyed by the
    # wrapped command's concrete type.
    _bound_cls_cache = {}

    def __new__(cls, command=None, **kwargs):
        if command is None:
            # Return partially-applied classcommand for use as a decorator
            return partial(cls, **kwargs)
        else:
            # Being used directly as a decorator without arguments
            return super().__new__(cls)

    def __init__(self, command, with_context=False, context_arg='ctx'):
        self.command = command
        self.with_context = with_context
        self.context_arg = context_arg

    def __get__(self, obj, cls=None):
        if cls is None:
            cls = type(obj)
        cmd_type = type(self.command)
        # Build (once per command type) a class mixing BoundCommandMixin
        # into the concrete command type, then bind it to the owner class.
        bound_cls = self._bound_cls_cache.setdefault(cmd_type,
            type('Bound' + cmd_type.__name__, (BoundCommandMixin, cmd_type), {}))
        return bound_cls(cls, self.command, self.with_context, self.context_arg)
First it introduces a notion of a "BoundCommand", which is sort of an extension of the notion of a bound method. In fact it just proxies a Command instance, but in fact replaces the command's original .callback attribute with a bound method on the callback, bound to either a class or instance depending on what binding is.
Since click's #pass_context and #pass_obj decorators don't really work with methods, it also provides replacement for the same functionality. If with_context=True the original callback is wrapped in a wrapper that provides the context as a keyword argument ctx (instead of as the first argument). The name of the argument can also be overridden by specifying context_arg.
If with_context=&lt;some type&gt;, the wrapper works the same as click's make_pass_decorator factory for the given type. Note: IIUC if you set with_context=object this is equivalent to @pass_obj.
The second part of this is the decorator class @classcommand, somewhat analogous to @classmethod. It implements a descriptor which simply returns BoundCommands for the wrapped Command.
Here's an example usage:
>>> class Foo:
... @classcommand(with_context=True)
... @click.group(no_args_is_help=False, invoke_without_command=True)
... @click.option('--bar')
... def main(cls, ctx, bar):
... print(cls)
... print(ctx)
... print(bar)
...
>>> Foo.__dict__['main']
<__main__.classcommand object at 0x7f1b471df748>
>>> Foo.main
<bound Group main of <class '__main__.Foo'>>
>>> try:
... Foo.main(['--bar', 'qux'])
... except SystemExit:
... pass
...
<class '__main__.Foo'>
<click.core.Context object at 0x7f1b47229630>
qux
In this example you can still extend the command with sub-commands as simple functions:
>>> @Foo.main.command()
... @click.option('--fred')
... def subcommand(fred):
... print(fred)
...
>>> try:
... Foo.main(['--bar', 'qux', 'subcommand', '--fred', 'flintstone'])
... except SystemExit:
... pass
...
...
<class '__main__.Foo'>
<click.core.Context object at 0x7f1b4715bb38>
qux
flintstone
One possible shortcoming to this is that the sub-commands are not tied to the BoundCommand, but just to the original Group object. So any subclasses of Foo will share the same subcommands as well, and could override each other. For my case this is not a problem, but it's worth considering. I believe a workaround would be possible, e.g. perhaps creating a copy of the original Group for each class it's bound to.
You could similarly implement an #instancecommand decorator for creating commands on instance methods. That's not a use case I have though so it's left as an exercise to the reader ^^
Update: I later came up with yet another solution to this problem, which is sort of a synthesis of my previous solutions, but I think a little bit simpler. I have packaged this solution as a new package objclick which can be used as a drop-in replacement for click like:
import objclick as click
I believe this can be used to solve the OP's problem. For example, to make a command from a "classmethod" you would write:
class BaseConverter():
    """Base converter using objclick's classcommand for the CLI entry.

    NOTE(review): the "#" prefixes in the paste are mangled "@" decorators.
    """

    @classmethod
    def convert(cls, infile, outfile):
        raise NotImplementedError

    @click.classcommand()
    @click.argument('infile')
    @click.argument('outfile')
    def cli(cls, infile, outfile):
        return cls.convert(infile, outfile)
where objclick.classcommand provides classmethod-like functionality (it is not necessary to specify classmethod explicitly; in fact currently this will break).
Old answer:
I came up with a different solution to this that I think is much simpler than my previous answer. Since I primarily needed this for click.group(), rather than use click.group() directly I came up with the descriptor+decorator classgroup. It works as a wrapper to click.group(), but creates a new Group instance whose callback is in a sense "bound" to the class on which it was accessed:
import click
from functools import partial, update_wrapper
class classgroup:
    """Descriptor + decorator: a click.group() bound to its owning class.

    The decorated callback receives the owning class as its first
    argument; the Group is created lazily per owner class and cached in
    that class's __dict__.
    """

    def __init__(self, *args, **kwargs):
        # Arguments forwarded to click.group() when the Group is built.
        self.args = args
        self.kwargs = kwargs
        self.callback = None
        self.recursion_depth = 0

    def __call__(self, callback):
        self.callback = callback
        return self

    def __get__(self, obj, owner=None):
        # The recursion_depth stuff is to work around an oddity where
        # click.group() uses inspect.getdoc on the callback to get the
        # help text for the command if none was provided via help=
        # However, inspect.getdoc winds up calling the equivalent
        # of getattr(owner, callback.__name__), causing a recursion
        # back into this descriptor; in this case we just return the
        # wrapped callback itself
        self.recursion_depth += 1
        if self.recursion_depth > 1:
            self.recursion_depth -= 1
            return self.callback
        if self.callback is None:
            return self
        if owner is None:
            owner = type(obj)
        key = '_' + self.callback.__name__
        # The Group instance is cached in the class dict
        group = owner.__dict__.get(key)
        if group is None:
            def callback(*args, **kwargs):
                # Prepend the owning class, like an implicit ``cls``.
                return self.callback(owner, *args, **kwargs)
            update_wrapper(callback, self.callback)
            group = click.group(*self.args, **self.kwargs)(callback)
            setattr(owner, key, group)
        self.recursion_depth -= 1
        return group
Additionally, I added the following decorator based on click's pass_context and pass_obj, but that I think is a little more flexible:
def with_context(func=None, obj_type=None, context_arg='ctx'):
    """Decorator passing the click context (or a context object) as a kwarg.

    Like click's pass_context/pass_obj, but the context is supplied as
    keyword argument *context_arg* instead of the first positional
    argument, so it composes with methods. When *obj_type* is a type,
    the nearest context object of that type is passed instead.
    """
    if func is None:
        # Called with arguments: act as a decorator factory.
        return partial(with_context, obj_type=obj_type, context_arg=context_arg)

    def context_wrapper(*args, **kwargs):
        ctx = click.get_current_context()
        target = ctx.find_object(obj_type) if isinstance(obj_type, type) else ctx
        kwargs[context_arg] = target
        return ctx.invoke(func, *args, **kwargs)

    update_wrapper(context_wrapper, func)
    return context_wrapper
They can be used together like this:
>>> class Foo:
... @classgroup(no_args_is_help=False, invoke_without_command=True)
... @with_context
... def main(cls, ctx):
... print(cls)
... print(ctx)
... ctx.obj = cls()
... print(ctx.obj)
...
>>> try:
... Foo.main()
... except SystemExit:
... pass
...
<class '__main__.Foo'>
<click.core.Context object at 0x7f8cf4056b00>
<__main__.Foo object at 0x7f8cf4056128>
Subcommands can easily be attached to Foo.main:
>>> @Foo.main.command()
... @with_context(obj_type=Foo, context_arg='foo')
... def subcommand(foo):
... print('subcommand', foo)
...
>>> try:
... Foo.main(['subcommand'])
... except SystemExit:
... pass
...
<class '__main__.Foo'>
<click.core.Context object at 0x7f8ce7a45160>
<__main__.Foo object at 0x7f8ce7a45128>
subcommand <__main__.Foo object at 0x7f8ce7a45128>
Unlike my previous answer, this has the advantage that all subcommands are tied to the class through which they were declared:
>>> Foo.main.commands
{'subcommand': <Command subcommand>}
>>> class Bar(Foo): pass
>>> Bar.main.commands
{}
As an exercise, you could also easily implement a version in which the main on subclasses inherit sub-commands from parent classes, but I don't personally need that.
Related
In a class Foo I link a static method in a class variable to subclass that class later and just exchange that function with another. The class contains some methods which call this exchangable function. The code below does not produce any mypy issues.
def some_function(text: str) -> None:
    """Print *text*; the callable stored as the exchangeable static method."""
    print(text)


class Foo:
    # Exchangeable callable held as a class-level static method; a
    # subclass can swap in a different function.
    _some_func: ClassVar[staticmethod] = staticmethod(some_function)

    def some_method(self, text: str) -> None:
        self._some_func(text)


if __name__ == "__main__":
    Foo().some_method("Hello World!")
Now, I am trying to improve my typing, so I want to use a callback protocol to actually add typing for Foo._some_func. I have created the following protocol class:
class SomeFuncProtocol(Protocol):
    """Callback protocol: one positional str argument, returns None."""

    def __call__(self, __text: str) -> None:
        ...
It does work as long as I use _some_func: ClassVar[SomeFuncProtocol] = some_function, but I can't find a way to use staticmethod and the protocol class for typing. I wish for something like the following, but mypy tells me that staticmethod does not expect a type argument.
class Foo:
_some_func: ClassVar[staticmethod[SomeFuncProtocol]] = staticmethod(some_function)
...
Does anybody know how to do it?
I was stuck with similar thing for a while, here's what worked for me:
from typing import ClassVar, Protocol
def some_function(text: str) -> None:
    print(text)


class SomeFuncProtocol(Protocol):
    """Protocol for callables taking a single positional str."""

    def __call__(self, __text: str) -> None:
        return


class Foo:
    # NOTE(review): mypy accepts staticmethod(some_function) as a
    # SomeFuncProtocol here — see the explanation below the example.
    _some_func: ClassVar[SomeFuncProtocol] = staticmethod(some_function)


Foo._some_func('a')
Foo()._some_func('a')
Foo._some_func = some_function
Foo()._some_func = some_function  # E: Cannot assign to class variable "_some_func" via instance
The code above typechecks (except for last line that is intentionally incorrect).
You don't need staticmethod in type annotation: it's a function that (simplified) takes callable as argument and returns another callable with same signature, but with explicit sign that it doesn't accept self. So return type of staticmethod is the same callable, we can express it like this:
from typing import Any, Callable, TypeVar
# Simplified typeshed-style stub: staticmethod returns a callable of the
# same type as its argument, so no type parameter is needed.
_C = TypeVar('_C', bound=Callable[..., Any])

def staticmethod(func: _C) -> _C: ...
You can try it in playground.
I would like to avoid using the "test" prefix in classes and functions names and implement my own schema of the test parametrization.
I did the next code
test.py
import pytest
# class for inheritance to avoid "Test" prefix
class AtsClass:
    __ATS_TEST_CLASS__ = True


# decorator to mark functions as tests (to avoid "Test" prefix)
# NOTE(review): "#ats_test" in the paste is a mangled "@ats_test" decorator.
def ats_test(f):
    """Mark *f* as a collectible test by setting __ATS_TEST_CLASS__."""
    setattr(f, "__ATS_TEST_CLASS__", True)
    return f


def test_1():
    pass


@ats_test
def some_global_test():
    pass


class MyClass(AtsClass):
    def test_4(self):
        pass

    @ats_test
    def some_func(self):
        pass
conftest.py
import pytest
import inspect
# @pytest.hookimpl(hookwrapper=True)
def pytest_pycollect_makeitem(collector, name, obj):
    """Collect classes/functions marked with __ATS_TEST_CLASS__ instead of
    relying on pytest's name-prefix matching."""
    # outcome = yield
    # res = outcome.get_result()
    if inspect.isclass(obj) and obj.__name__ != "AtsClass" and hasattr(obj, "__ATS_TEST_CLASS__") and obj.__ATS_TEST_CLASS__ == 1:
        print("WE HAVE FOUND OUR CLASS")
        return pytest.Class(name, parent=collector)
        # outcome.force_result(pytest.Class(name, parent=collector))
    if inspect.isfunction(obj) and hasattr(obj, "__ATS_TEST_CLASS__") and obj.__ATS_TEST_CLASS__ == 1:
        print("WE HAVE FOUND OUR FUNCTION")
        return pytest.Function(name, parent=collector)
        # outcome.force_result([pytest.Function(name, parent=collector)])
def pytest_generate_tests(metafunc):
    """Trace which test functions reach the parametrization hook."""
    print("-->Generate: {}".format(metafunc.function.__name__))
In this case hook "pytest_pycollect_makeitem" creates test for function "some_global_test", but hook "pytest_generate_tests" is not executed for function "some_global_test".
I have found a solution: call collector._genfunctions(name, obj) from my hook. But I think it is not the right decision, because _genfunctions is a private method and not part of the declared API.
Is there another way to solve my task?
So, nobody knows the answer and I decided to offer my solution (it can be useful for others):
class TestBaseClass:
    # pytest honors the __test__ attribute when deciding collection.
    __test__ = True


# NOTE(review): "#mark_test" in the paste is a mangled "@mark_test" decorator.
def mark_test(f):
    """Mark *f* as collectible via the __test__ attribute."""
    setattr(f, "__test__", True)
    return f


# using base class and decorator
class MyTestClass(TestBaseClass):
    @mark_test
    def some_func(self):
        pass
Pytest uses attribute __test__ to detect nose-tests, so you can use nose-library or just use such base class and decorator.
If you only want to change the prefix of tests, you can set custom python_functions and python_classes options in pytest.ini.
For more information follow a link.
I'm implementing custom test cases that are based on external files using the tutorial from https://docs.pytest.org/en/latest/example/nonpython.html.
I need to parametrise them with one bool flag. I'd like to be able to run pytest with a commandline option, in my case --use-real-api, which would turn using mocks off and do the real talking to a remote network API.
I've tried using the cmdopt tutorial and blend them together, but can't find any way to read the parameter from within the custom pytest.Item subclass. Could you help? Here is a trivial example from the tutorial. I'd like to get it to change the test behaviour depending on the value of the cmdopt passed.
# content of conftest.py
import pytest
def pytest_collect_file(parent, path):
    """Collect test*.yml files as YamlFile nodes."""
    if path.ext == ".yml" and path.basename.startswith("test"):
        return YamlFile(path, parent)
class YamlFile(pytest.File):
    """Collector yielding one YamlItem per top-level YAML entry."""

    def collect(self):
        import yaml
        raw = yaml.safe_load(self.fspath.open())
        for name, spec in sorted(raw.items()):
            yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
    """A single YAML-defined test case; *spec* is the parsed mapping."""

    def __init__(self, name, parent, spec):
        super().__init__(name, parent)
        self.spec = spec

    def runtest(self):
        for name, value in sorted(self.spec.items()):
            # some custom test execution (dumb example follows)
            if name != value:
                raise YamlException(self, name, value)

    def repr_failure(self, excinfo):
        """ called when self.runtest() raises an exception. """
        if isinstance(excinfo.value, YamlException):
            return "\n".join(
                [
                    "usecase execution failed",
                    " spec failed: %r: %r" % excinfo.value.args[1:3],
                    " no further details known at this point.",
                ]
            )

    def reportinfo(self):
        return self.fspath, 0, "usecase: %s" % self.name


class YamlException(Exception):
    """ custom exception for error reporting. """
def pytest_addoption(parser):
    """Register the --cmdopt command-line option."""
    parser.addoption(
        "--cmdopt", action="store", default="type1", help="my option: type1 or type2"
    )
# NOTE(review): "#pytest.fixture" in the paste is a mangled "@pytest.fixture".
@pytest.fixture
def cmdopt(request):
    """Fixture exposing the --cmdopt value to tests."""
    return request.config.getoption("--cmdopt")
Each collection entity in pytest (File, Module, Function etc) is a subtype of the Node class which defines access to the config object. Knowing that, the task becomes easy:
def pytest_addoption(parser):
    """Register the --run-yml opt-in flag for YAML test collection."""
    parser.addoption('--run-yml', action='store_true')
def pytest_collect_file(parent, path):
    """Collect test*.yml files only when --run-yml was given."""
    # Every pytest collection node exposes the config object.
    run_yml = parent.config.getoption('--run-yml')
    if run_yml and path.ext == ".yml" and path.basename.startswith("test"):
        return YamlFile(path, parent)
Running pytest --run-yml will now collect the YAML files; without the flag, they are ignored.
Same for accessing the config in custom classes, for example:
class YamlItem(pytest.Item):
    def runtest(self):
        # self.config is available on every pytest node.
        run_yml = self.config.getoption('--run-yml')
        ...
etc.
Is it possible to do something like this with Python Click?
# NOTE(review): "#click.command" is a mangled "@click.command"; passing a
# list to name= is the hypothetical API the question is asking about.
@click.command(name=['my-command', 'my-cmd'])
def my_command():
    pass
I want my command lines to be something like:
mycli my-command
and
mycli my-cmd
but reference the same function.
Do I need to do a class like AliasedGroup?
AliasedGroup is not what you are after, since it allows a shortest-prefix match, and it appears you need actual aliases. But that example does provide hints in a direction that can work. It inherits from click.Group and overrides some behavior.
Here is a one way to approach what you are after:
Custom Class
This class overrides the click.Group.command() method which is used to decorate command functions. It adds the ability to pass a list of command aliases. This class also adds a short help which references the aliased command.
class CustomMultiCommand(click.Group):
    def command(self, *args, **kwargs):
        """Behaves the same as `click.Group.command()` except if passed
        a list of names, all after the first will be aliases for the first.
        """
        def decorator(f):
            if isinstance(args[0], list):
                # First list entry is the canonical name; the remaining
                # entries are registered as separate commands sharing the
                # same callback, each marked as an alias in --help.
                _args = [args[0][0]] + list(args[1:])
                for alias in args[0][1:]:
                    cmd = super(CustomMultiCommand, self).command(
                        alias, *args[1:], **kwargs)(f)
                    cmd.short_help = "Alias for '{}'".format(_args[0])
            else:
                _args = args
            # Register the canonical command (or the plain command when
            # no alias list was given).
            cmd = super(CustomMultiCommand, self).command(
                *_args, **kwargs)(f)
            return cmd
        return decorator
Using the Custom Class
By passing the cls parameter to the click.group() decorator, any command added to the group via the group.command() decorator can be passed a list of command names.
# NOTE(review): the "#" prefixes in the paste are mangled "@" decorators.
@click.group(cls=CustomMultiCommand)
def cli():
    """My Excellent CLI"""


@cli.command(['my-command', 'my-cmd'])
def my_command():
    ...
Test Code:
import click
# NOTE(review): the "#" prefixes in the paste are mangled "@" decorators.
@click.group(cls=CustomMultiCommand)
def cli():
    """My Excellent CLI"""


@cli.command(['my-command', 'my-cmd'])
def my_command():
    """This is my command"""
    print('Running the command')


if __name__ == '__main__':
    cli('--help'.split())
Test Results:
Usage: my_cli [OPTIONS] COMMAND [ARGS]...
My Excellent CLI
Options:
--help Show this message and exit.
Commands:
my-cmd Alias for 'my-command'
my-command This is my command
Here is a simpler way to solve the same thing:
class AliasedGroup(click.Group):
    """Group that resolves alias names to their canonical commands."""

    def get_command(self, ctx, cmd_name):
        try:
            # Map an alias to the real command's name; unknown names
            # fall through unchanged.
            cmd_name = ALIASES[cmd_name].name
        except KeyError:
            pass
        return super().get_command(ctx, cmd_name)


# NOTE(review): the "#click..." lines in the paste are mangled "@click..."
# decorators.
@click.command(cls=AliasedGroup)
def cli():
    ...


@click.command()
def install():
    ...


@click.command()
def remove():
    ...


cli.add_command(install)
cli.add_command(remove)

ALIASES = {
    "it": install,
    "rm": remove,
}
Since this question has been asked, a click-aliases library has been created.
It works a bit like the other answers except that you don’t have to declare the command class by yourself:
import click
from click_aliases import ClickAliasedGroup
# NOTE(review): the "#" prefixes in the paste are mangled "@" decorators.
@click.group(cls=ClickAliasedGroup)
def cli():
    pass


@cli.command(aliases=['my-cmd'])
def my_command():
    pass
I'm writing a Python class to wrap/decorate/enhance another class from a package called petl, a framework for ETL (data movement) workflows. Due to design constraints I can't just subclass it; every method call has to be sent through my own class so I can control what kind of objects are being passed back. So in principle this is a proxy class, but I'm having some trouble using existing answers/recipes out there. This is what my code looks like:
from functools import partial
class PetlTable(object):
    """not really how we construct petl tables, but for illustrative purposes"""

    def hello(name):
        # NOTE(review): closing paren was missing in the paste; restored.
        # The method deliberately has no `self` in the original example.
        print('Hello, {}!'.format(name))
class DatumTable(object):
    """Proxy that forwards attribute access to a wrapped petl table."""

    def __init__(self, petl_tbl):
        self.petl_tbl = petl_tbl

    def __getattr__(self, name):
        """this returns a partial referencing the child method"""
        petl_attr = getattr(self.petl_tbl, name, None)
        if petl_attr and callable(petl_attr):
            # NOTE(review): binding `func` as a keyword here is what makes
            # later positional calls collide with it — the TypeError the
            # question reports (see the explanation below).
            return partial(self.call_petl_method, func=petl_attr)
        raise NotImplementedError('Not implemented')

    def call_petl_method(self, func, *args, **kwargs):
        func(*args, **kwargs)
Then I try to instantiate a table and call something:
# create a petl table
pt = PetlTable()
# wrap it with our own class
dt = DatumTable(pt)
# try to run the petl method
dt.hello('world')
This gives a TypeError: call_petl_method() got multiple values for argument 'func'.
This only happens with positional arguments; kwargs seem to be fine. I'm pretty sure it has to do with self not being passed in, but I'm not sure what the solution is. Can anyone think of what I'm doing wrong, or a better solution altogether?
This seems to be a common issue with mixing positional and keyword args:
TypeError: got multiple values for argument
To get around it, I took the positional arg func out of call_petl_method and put it in a kwarg that's unlikely to overlap with the kwargs of the child function. A little hacky, but it works.
I ended up writing a Proxy class to do all this generically:
class Proxy(object):
    """Generic proxy: forwards method calls to a child object and wraps
    each result in a new instance of the same class."""

    def __init__(self, child):
        self.child = child

    def __getattr__(self, name):
        child_attr = getattr(self.child, name)
        # Pass the target method via a collision-unlikely keyword so the
        # caller's positional args cannot clash with it.
        return partial(self.call_child_method, __child_fn__=child_attr)

    # NOTE(review): "#classmethod" in the paste is a mangled "@classmethod";
    # without it, `cls(r)` below would be called on an instance and fail.
    @classmethod
    def call_child_method(cls, *args, **kwargs):
        """
        This calls a method on the child object and wraps the response as an
        object of its own class.

        Takes a kwarg `__child_fn__` which points to a method on the child
        object.

        Note: this can't take any positional args or they get clobbered by the
        keyword args we're trying to pass to the child. See:
        https://stackoverflow.com/questions/21764770/typeerror-got-multiple-values-for-argument
        """
        # get child method
        fn = kwargs.pop('__child_fn__')
        # call the child method
        r = fn(*args, **kwargs)
        # wrap the response as an object of the same class
        r_wrapped = cls(r)
        return r_wrapped
This will also solve the problem. It doesn't use partial at all.
class PetlTable(object):
    """not really how we construct petl tables, but for illustrative purposes"""

    def hello(name):
        greeting = 'Hello, {}!'.format(name)
        print(greeting)
class DatumTable(object):
    """Proxy that resolves attributes from the wrapped object's class."""

    def __init__(self, petl_tbl):
        self.petl_tbl = petl_tbl

    def __getattr__(self, name):
        """Looks-up named attribute in class of the petl_tbl object."""
        attr = type(self.petl_tbl).__dict__.get(name)
        if attr and callable(attr):
            return attr
        raise NotImplementedError('Not implemented')
if __name__ == '__main__':
    # create a petl table
    pt = PetlTable()
    # wrap it with our own class
    dt = DatumTable(pt)
    # try to run the petl method
    dt.hello('world')  # -> Hello, world!