I have a function:
def dummy_func(a, b: float, c: int = 1) -> float:
    # some code
I want to write a generic functor (dummy_prepender) that takes a function and constructs a new one whose only difference from the original is that an extra, unused parameter is prepended to the signature:
dummy_method1 = dummy_prepender(dummy_func)
# dummy_method1 should behave exactly like the following code:
def dummy_method(dummy, a, b: float, c: int = 1) -> float:
    # some code
so that for any set of arguments and any dummy_arg,
dummy_method(dummy_arg, arguments...) behaves exactly the same as dummy_func(arguments...).
I thought that staticmethod would do the trick, but there are places where the dispatch happens differently, and applying staticmethod has no effect there.
Some behavior tests:
help(dummy_method) should show the correct signature
dummy_method(dummy='', a=1) should fail
dummy_method(dummy='', a=1, b=2, c=3, z=4) should fail
Update:
Here is an example of staticmethod not working as expected:
Example 1:
import inspect

factory_type = type('Typ1', (), {
    '__init__': staticmethod(dummy_func),
})

inspect.signature(factory_type)
# <Signature (b: float, c: int = 1) -> float>
# BTW, in this case it's just inspect.signature that is broken; the call behavior seems to be correct.
Example 2:
factory_type2 = type('Typ1', (), {
    '__new__': staticmethod(dummy_func),
})
#Call behavior is broken
factory_type2(a=1, b=2)
# TypeError: dummy_func() got multiple values for argument 'a'
factory_type2(7, 13)
# locals()={'c': 13, 'b': 7, 'a': <class '__main__.Typ1'>}
# See how the class got passed as `a` instead of `dummy`
The result of staticmethod(func) is not a function but a descriptor object, and sometimes its behavior is not the same.
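To see this concretely, a quick check (assuming dummy_func from above):

sm = staticmethod(dummy_func)
type(sm)                  # <class 'staticmethod'>, not <class 'function'>
sm.__func__               # the wrapped function itself
sm.__get__(None, object)  # the descriptor protocol hands back the plain function
# sm(1, 2.0) raises TypeError on Python < 3.10; staticmethod objects only became callable in 3.10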
My first try would be something like this:
def dummy_method(*args, **kwargs) -> float:
    return dummy_func(*args[1:], **kwargs)
The upcoming Python 3.8 implements PEP 570, which allows the following syntax:
# 'dummy' is a positional-only argument, disallowing 'dummy=...'
def dummy_method(dummy, /, *args, **kwargs) -> float:
    return dummy_func(*args, **kwargs)
Here is another version for Python 3.8, which keeps the signature:
def dummy_method(dummy, /, a, b: float, c: int = 1) -> float:
    return dummy_func(a, b, c)
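Putting these pieces together, a generic dummy_prepender can be sketched for Python 3.8+ by wrapping the function and attaching an adjusted __signature__, so that help() reports the prepended parameter and the keyword tests above still fail as required. This is only a sketch, not a battle-tested implementation:

import functools
import inspect

def dummy_prepender(func):
    # the actual dispatch: drop the positional-only dummy, forward the rest
    @functools.wraps(func)
    def wrapper(dummy, /, *args, **kwargs):
        return func(*args, **kwargs)
    # prepend a positional-only 'dummy' parameter to the reported signature,
    # so help() and inspect.signature() show the new shape
    sig = inspect.signature(func)
    dummy_param = inspect.Parameter('dummy', inspect.Parameter.POSITIONAL_ONLY)
    wrapper.__signature__ = sig.replace(
        parameters=[dummy_param, *sig.parameters.values()]
    )
    return wrapper

dummy_method1 = dummy_prepender(dummy_func)
help(dummy_method1)  # shows (dummy, /, a, b: float, c: int = 1) -> float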
Related
I am trying to examine the types of a function's arguments before the call (in this example, the function is foo). I am using a decorator to achieve this, but I don't see how I can get the arguments in the same order as they are visible to foo. In the following example I get two different orderings for what is essentially the same call.
def wrapper(func):
    def f(*args, **kwargs):
        print([type(x) for x in args] + [type(v) for v in kwargs.values()])
        return func(*args, **kwargs)
    return f
@wrapper
def foo(a, b, c, d):
    print(f"{a} {b} {c} {d}")
foo(10, 12.5, 14, 5.2) # all good: int, float, int, float
foo(10, 12.5, d=5.2, c=14) # not what I want: int, float, float, int
Is it possible to get arguments in a consistent order? If not, then is it at least possible to get them all keyed by argument name? Something that looks like this:
def wrapper(func):
    def f(**kwargs):
        # kwargs = {'a': 10, 'b': 12.5, 'c': 14, 'd': 5.2}
        print([type(v) for v in kwargs.values()])
        return func(**kwargs)
    return f
foo(10, 12.5, 14, 5.2) # obviously doesn't work
Type checking through annotations alone is a bit weak, since it only works if you annotate your code. A more robust approach uses inspect from the standard library: it provides full access to frames and everything else you may need. Here inspect.signature can be used to fetch the signature of the original function and recover the original order of the parameters; then the arguments are regrouped and passed back to the original function. More details in the comments.
from inspect import signature

def wrapper(func):
    def f(*args, **kwargs):
        # signature object of the wrapped function
        sign = signature(func)
        # use the order of the function's parameters as reference
        order = dict.fromkeys(sign.parameters)
        # fill in the keyword arguments first
        order.update(**kwargs)
        # then fill the remaining free slots with the positionals
        free_pars = (k for k, v in order.items() if v is None)
        order.update(zip(free_pars, args))
        return func(**order)
    return f
@wrapper
def foo(a, b, c, d):
    print(f"{a} {b} {c} {d}")
foo(10, 12.5, 14, 5.2)
#10 12.5 14 5.2
foo(10, 12.5, d=5.2, c=14)
#10 12.5 14 5.2
The code is also compatible with annotations:
@wrapper
def foo(a: int, b: float, c: int, d: float) -> None:
    print(f"{a} {b} {c} {d}")
The annotations way, no imports required:
This is a copy-paste of the code above, but it uses the __annotations__ attribute to get the signature. Remember that a function may or may not have an annotation for its return value.
def wrapper(func):
    def f(*args, **kwargs):
        if not func.__annotations__:
            raise Exception('No clue... inspect or annotate properly')
        # copy, so the function's own __annotations__ is not mutated
        params = dict(func.__annotations__)
        # remove the possible return annotation: it is not a parameter
        params.pop('return', None)
        order = dict.fromkeys(params)
        order.update(**kwargs)
        free_pars = (k for k, v in order.items() if v is None)
        order.update(zip(free_pars, args))
        return func(**order)
    return f
@wrapper
def foo(a: int, b: float, c: int, d: float) -> None:
    print(f"{a} {b} {c} {d}")
The first thing to be careful of is that keyword arguments exist precisely because order does not matter for them; they are intended to map a value to a specific argument by name at call time. So enforcing any specific order on kwargs does not make much sense (or at least would be confusing to anyone trying to use your decorator). You will probably want to check which kwargs are specified and remove the corresponding argument types.
Next, if you want to be able to check the argument types, you will need a way to tell your decorator what types you expect by passing it an argument (you can see more about this here). The only way to do this is to pass a dictionary mapping each argument to the expected type:
@wrapper({'a': int, 'b': int, 'c': float, 'd': int})
def f(a, b, c=6.0, d=5):
    pass
def wrapper(types):
    def inner(func):
        def wrapped_func(*args, **kwargs):
            # be careful here, this only works if kwargs is ordered;
            # for Python < 3.6 this portion will not work
            expected_types = [v for k, v in types.items() if k not in kwargs]
            actual_types = [type(arg) for arg in args]
            # substitute these in case you are dead set on checking keyword arguments as well:
            # expected_types = list(types.values())
            # actual_types = [type(arg) for arg in args] + [type(v) for v in kwargs.values()]
            if expected_types != actual_types:
                raise TypeError(f"bad argument types:\n\tE: {expected_types}\n\tA: {actual_types}")
            func(*args, **kwargs)
        return wrapped_func
    return inner
@wrapper({'a': int, 'b': float, 'c': int})
def f(a, b, c):
    print('good')
f(10, 2.0, 10)
f(10, 2.0, c=10)
f(10, c=10, b=2.0)
f(10, 2.0, 10.0) # will raise exception
Now after all of this, I want to point out that this functionality is probably largely unwanted and unnecessary in Python code. Python was designed to be dynamically typed, so anything resembling strong types goes against the grain and won't be expected by most.
Next, since Python 3.5 we have had access to the built-in typing package. This lets you specify the types you expect to receive in a function call:
def f(a: int, b: float, c: int) -> int:
    return a + int(b) + c
Now this won't actually do any type assertions for you, but it will make it plainly obvious what types you are expecting, and most (if not all) IDEs will give you visual warnings that you are passing the wrong type to a function.
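For example, running a checker such as mypy over a bad call flags it, even though the call would still execute at runtime:

f(1, "2", 3)  # mypy: Argument 2 to "f" has incompatible type "str"; expected "float"
f(1, 2.0, 3)  # fine, returns 6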
I would like to change the input arguments of a function. This will also lead to changes within the function body.
What's a pythonic way to mark an input argument "deprecated" and maintain backward compatibility at the same time? Here's a toy example:
from typing import List

# original function
def sum_numbers(numbers: List[int]):
    return sum(numbers)

# function with changed input arguments and function body
def sum_numbers(a: int, b: int) -> int:
    return a + b
The user should be able to call sum_numbers either with the numbers: List[int] argument or with a: int, b: int. However, I want to emit a DeprecationWarning when the user uses the original call style.
One option is to overload the function using the multipledispatch module:
from multipledispatch import dispatch

@dispatch(int, int)
def sum_numbers(a, b):
    return a + b

@dispatch(list)
def sum_numbers(numbers):
    print("Deprecated")
    return sum(numbers)
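Both call styles then keep working, and only the old one announces the deprecation:

sum_numbers(1, 2)        # returns 3
sum_numbers([1, 2, 3])   # prints "Deprecated", returns 6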
An alternative to multipledispatch is to take *args, or to have an optional argument, and dispatch internally:
import warnings

# original function
def sum_numbers(a, b=None):
    if isinstance(a, list):
        warnings.warn("...", DeprecationWarning, stacklevel=2)
        return sum(a)
    return a + b
then for typing purposes you can use typing.overload:
import typing

@typing.overload
def sum_numbers(numbers: list[int]) -> int:
    """deprecated"""
@typing.overload
def sum_numbers(a: int, b: int) -> int:
    ...
(note that as documented the overloads should come first and the actual implementation last)
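Putting the two pieces together (the warning message here is made up for illustration), a self-contained version looks like this:

import typing
import warnings

@typing.overload
def sum_numbers(numbers: list[int]) -> int:
    """deprecated"""
@typing.overload
def sum_numbers(a: int, b: int) -> int:
    ...
def sum_numbers(a, b=None):
    if isinstance(a, list):
        warnings.warn(
            "passing a list to sum_numbers is deprecated, pass two ints",
            DeprecationWarning,
            stacklevel=2,
        )
        return sum(a)
    return a + b

assert sum_numbers(1, 2) == 3
assert sum_numbers([1, 2, 3]) == 6  # emits a DeprecationWarning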
from typing import Callable

def f() -> Callable[[  # how to show there can be any number of int?
                    ], float]:
    def g(*args):
        assert all(type(x) == int for x in args)
        return 0.1
    return g
I read the typing docs, and Callable (i.e. Callable[..., ReturnType]) is not what I need.
I know Tuple[int, ...], but Callable[[int, ...], float] gives the Pylance error '"..." is not allowed in this context'.
You can do this by defining a Protocol with a __call__ whose function signature has the desired typing:
from typing import Protocol

class IntCallable(Protocol):
    def __call__(self, *args: int) -> float: ...

def f() -> IntCallable:
    def g(*args: int) -> float:
        assert all(type(x) == int for x in args)
        return 0.1
    return g
Testing it out with mypy:
f()(1, 2, 3) # fine
f()("foo") # error: Argument 1 to "__call__" of "IntCallable" has incompatible type "str"; expected "int"
The other option is to have your function take a single Iterable[int] argument instead of an arbitrary number of int arguments, which lets you use a simple Callable typing instead of having to go the more complex Protocol route.
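For completeness, a sketch of that simpler Iterable[int] alternative:

from typing import Callable, Iterable

def f() -> Callable[[Iterable[int]], float]:
    def g(xs: Iterable[int]) -> float:
        assert all(isinstance(x, int) for x in xs)
        return 0.1
    return g

f()([1, 2, 3])  # fine
f()(["foo"])    # flagged by mypy: List item 0 has incompatible type "str"; expected "int"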
I'm trying to create a function that can be used as an annotation (to later inspect the function and do stuff with the expected return values...)
def WillReturn(*kwargs):
    # dark magic
    return kwargs
Question is: how do I annotate WillReturn in such a way that the type hinter will give the following warnings?
def MyFunction(a, b, c) -> WillReturn(int=1, str='yo'):
    return (123,
            1.2,   # error! expects a str at position #1
            None)  # error! expects two values
Idea 1:
This would do the trick but is not quite scalable:
A, B, C, D, E, F = map(typing.TypeVar, 'ABCDEF')

@typing.overload
def WillReturn(A) -> A: ...
@typing.overload
def WillReturn(A, B) -> (A, B): ...
@typing.overload
def WillReturn(A, B, C) -> (A, B, C): ...
# and so on and so forth
Idea 2:
subclass from typing.Tuple or use _VariadicGenericAlias directly, but I'm not entirely sure if this is the intended usage of either object.
Often I encounter the scenario of functions which accept a finite set of values only. I know how to reflect this behavior in the type annotations, using typing.Literal like so:
import typing

def func(a: typing.Literal['foo', 'bar']):
    pass
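At runtime, however, nothing stops a value outside the literal set, which is exactly the problem:

func('baz')  # no runtime error, even though 'baz' is not an allowed value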
I would like to have a decorator @validate_literals which validates that the arguments passed to the function are consistent with their types:
@validate_literals
def picky_typed_function(
    binary: typing.Literal[0, 1],
    char: typing.Literal['a', 'b']
) -> None:
    pass
so that the input is validated against the restrictions defined by the arguments' types, and a ValueError is raised in case of a violation:
picky_typed_function(0, 'a') # should pass
picky_typed_function(2, 'a') # should raise "ValueError: binary must be one of (0, 1)"
picky_typed_function(0, 'c') # should raise "ValueError: char must be one of ('a', 'b')"
picky_typed_function(0, char='c') # should raise "ValueError: char must be one of ('a', 'b')"
picky_typed_function(binary=2, char='c') # should raise "ValueError: binary must be one of (0, 1)"
typing type checks are designed to be static, and not happen during runtime. How can I leverage the typing definition for runtime validation?
We can inspect the decorated (validated) function's signature using inspect.signature, check which of the function's parameters are typed as a Literal alias by getting the "origin" of the parameter's annotation through typing.get_origin() (or, for Python versions < 3.8, __origin__), and retrieve the valid values with [typing.get_args()](https://stackoverflow.com/a/64522240/3566606), iterating recursively over nested Literal definitions.
All that is left to do then is to figure out which parameters have been passed as positional arguments and map the corresponding values to the parameters' names, so each value can be compared against the valid values for its parameter.
Finally, we build the decorator using the standard recipe with functools.wraps. In the end, this is the code:
import functools
import inspect
import typing

def args_to_kwargs(func: typing.Callable, *args, **kwargs) -> dict:
    args_dict = {
        list(inspect.signature(func).parameters.keys())[i]: arg
        for i, arg in enumerate(args)
    }
    return {**args_dict, **kwargs}

def valid_args_from_literal(annotation) -> typing.Set[typing.Any]:
    args = typing.get_args(annotation)
    valid_values = []
    for arg in args:
        # Literals may be nested, e.g. Literal[Literal[0, 1], 2]
        if typing.get_origin(arg) is typing.Literal:
            valid_values += valid_args_from_literal(arg)
        else:
            valid_values += [arg]
    return set(valid_values)
def validate_literals(func: typing.Callable) -> typing.Callable:
    @functools.wraps(func)
    def validated(*args, **kwargs):
        kwargs = args_to_kwargs(func, *args, **kwargs)
        for name, parameter in inspect.signature(func).parameters.items():
            # use parameter.annotation.__origin__ for Python versions < 3.8
            if typing.get_origin(parameter.annotation) is typing.Literal:
                valid_values = valid_args_from_literal(parameter.annotation)
                if kwargs[name] not in valid_values:
                    raise ValueError(
                        f"Argument '{name}' must be one of {valid_values}"
                    )
        return func(**kwargs)
    return validated
This gives the results specified in the question.
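For example, with the picky_typed_function from the question:

@validate_literals
def picky_typed_function(
    binary: typing.Literal[0, 1],
    char: typing.Literal['a', 'b']
) -> None:
    pass

picky_typed_function(0, 'a')  # passes
picky_typed_function(2, 'a')  # ValueError: Argument 'binary' must be one of {0, 1}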
I have also published an alpha version of a Python package, runtime-typing, that performs runtime type checking: https://pypi.org/project/runtime-typing/ (documentation: https://runtime-typing.readthedocs.io). It handles more cases than just typing.Literal, such as typing.TypeVar and typing.Union.
from typing import Literal
from valdec.dec import validate

@validate
def func(a: Literal["foo", "bar"]) -> str:
    return a

assert func("bar") == "bar"

@validate("return", exclude=True)
def func(binary: Literal[0, 1], char: Literal["a", "b"]):
    return binary, char

assert func(0, "a") == (0, "a")

func(2, "x")
# valdec.utils.ValidationArgumentsError: Validation error <class
# 'valdec.val_pydantic.ValidationError'>: 2 validation errors for argument
# with the name of:
# binary
# unexpected value; permitted: 0, 1 (type=value_error.const; given=2;
# permitted=(0, 1))
# char
# unexpected value; permitted: 'a', 'b' (type=value_error.const; given=x;
# permitted=('a', 'b')).
valdec: https://github.com/EvgeniyBurdin/valdec