Decorator: Maintain state - python

I need to record metadata about a given function, such as which parameters it takes, etc. An example of what I would like to do is:
@author("Joey")
@parameter("name", type=str)
@parameter("id", type=int)
@returns("Employee", desc="Returns employee with given details", type="Employee")
def get_employee(name, id):
    #
    # Some logic to return employee
    #
Skeleton of the decorator could be as follows:
json = {}
def author(author):
    def wrapper(func):
        def internal(*args, **kwargs):
            json["author"] = author
            return func(*args, **kwargs)
        return internal
    return wrapper
Similarly, the parameter decorator could be written as follows:
def parameter(name, type=None):
    def wrapper(func):
        def internal(*args, **kwargs):
            para = {}
            para["name"] = name
            para["type"] = type
            json.setdefault("parameters", []).append(para)
            return func(*args, **kwargs)
        return internal
    return wrapper
Similarly, other handlers could be written. At the end, I can call one function that collects the JSON formed for each function.
End output could be
[
    {fun_name: "get_employee", author: "Joey", parameters: [{para_name: "name", type: str}, ...], returns: {type: Employee, desc: "..."}},
    {fun_name: "search_employee", author: "Bob", parameters: [{para_name: "age", type: int}, ...], returns: {type: Employee, desc: "..."}},
    ...
]
I'm not sure how to maintain state across the decorators so that all the data regarding one function is consolidated together. How can I achieve this?

I don't know if I fully get your use case, but wouldn't it work to add the author to your current functions like this:
func_list = []

def func(var):
    return var

json = {}
json['author'] = 'JohanL'
json['func'] = func.__name__
func.json = json
func_list.append(func.json)

def func2(var):
    return var

json = {}
json['author'] = 'Ganesh'
json['func'] = func2.__name__
func2.json = json
func_list.append(func2.json)
This can be automated using a decorator as follows:
def author(author):
    json = {}
    def author_decorator(func):
        json['func'] = func.__name__
        json['author'] = author
        func.json = json
        return func
    return author_decorator

def append(func_list):
    def append_decorator(func):
        func_list.append(func.json)
        return func
    return append_decorator

func_list = []

@append(func_list)
@author('JohanL')
def func(var):
    return var

@append(func_list)
@author('Ganesh')
def func2(var):
    return var
Then you can access the json dict as func.json and func2.json, or find the functions in func_list. Note that for the decorators to work, you have to add them in the order I have put them, and I have not added any error handling.
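For instance, after the decorated definitions above, the collected metadata looks like this:
print(func.json)   # {'func': 'func', 'author': 'JohanL'}
print(func_list)   # [{'func': 'func', 'author': 'JohanL'}, {'func': 'func2', 'author': 'Ganesh'}]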
Also, if you prefer the func_list not to be explicitly passed, but instead to use a globally defined list with an explicit name, the code can be somewhat simplified to:
func_list = []

def author(author):
    json = {}
    def author_decorator(func):
        json['func'] = func.__name__
        json['author'] = author
        func.json = json
        return func
    return author_decorator

def append(func):
    global func_list
    func_list.append(func.json)
    return func

@append
@author('JohanL')
def func(var):
    return var

@append
@author('Ganesh')
def func2(var):
    return var
Maybe this is sufficient for you?
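Building on the same attach-it-to-the-function idea, the parameter and returns decorators from the question can be consolidated per function as well, since every decorator mutates the same dict attached to the function. A minimal sketch (the _meta and register helpers are my own names, not from the question):

func_list = []

def _meta(func):
    # Lazily create the per-function metadata dict on first use.
    if not hasattr(func, 'json'):
        func.json = {'func': func.__name__, 'parameters': []}
    return func.json

def author(name):
    def decorator(func):
        _meta(func)['author'] = name
        return func
    return decorator

def parameter(name, type=None):
    def decorator(func):
        # Decorators apply bottom-up, so insert at the front
        # to keep the declared order.
        _meta(func)['parameters'].insert(0, {'name': name, 'type': type})
        return func
    return decorator

def returns(name, desc=None, type=None):
    def decorator(func):
        _meta(func)['returns'] = {'name': name, 'desc': desc, 'type': type}
        return func
    return decorator

def register(func):
    func_list.append(_meta(func))
    return func

@register
@author('Joey')
@parameter('name', type=str)
@parameter('id', type=int)
@returns('Employee', desc='Returns employee with given details', type='Employee')
def get_employee(name, id):
    pass  # some logic to return the employee

Since every decorator works on the same dict, the order of the decorators no longer matters, and func_list ends up holding one consolidated entry per function.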

Related

Python - list comprehension as a decorator (including self)

I have two functions:
job_status gets a response from the boto3 API.
jobs_details is a list comprehension that performs job_status on each element of the input list.
I want to change jobs_details into a decorator of job_status, but the solution below throws an inner() takes 1 positional argument but 2 were given error.
Appreciate any comments/alternative approaches to my issue. Thanks!
import boto3

class GlueClient:
    def __init__(self):
        self.glue_client = boto3.client('glue')
        #self.envs = envs

    def jobs_list(self):
        response = self.glue_client.list_jobs()
        result = response["JobNames"]
        while "NextToken" in response:
            response = self.glue_client.list_jobs(NextToken=response["NextToken"])
            result.extend(response["JobNames"])
        return [e for e in result if "jobs_xyz" in e]

    # WHAT IS CURRENTLY
    def job_status(self, job_name):
        paginator = self.glue_client.get_paginator('get_job_runs')
        response = paginator.paginate(JobName=job_name)
        return response

    def jobs_details(self, jobs):
        return [self.job_status(e) for e in jobs]

    # WHAT IS EXPECTED
    def pass_by_list_comprehension(func):
        def inner(list_of_val):
            return [func(value) for value in list_of_val]
        return inner

    @pass_by_list_comprehension
    def job_status(self, job_name):
        paginator = self.glue_client.get_paginator('get_job_runs')
        response = paginator.paginate(JobName=job_name)
        return response

glue_client = GlueClient()
jobs = glue_client.jobs_list()
jobs_status = glue_client.job_status(jobs)
print(jobs)
You want something like:
import boto3

def handle_iterable_input(func):
    def inner(self, list_of_val):
        return [func(self, value) for value in list_of_val]
    return inner

class GlueClient:
    def __init__(self):
        self.glue_client = boto3.client('glue')
        #self.envs = envs

    def jobs_list(self):
        response = self.glue_client.list_jobs()
        result = response["JobNames"]
        while "NextToken" in response:
            response = self.glue_client.list_jobs(NextToken=response["NextToken"])
            result.extend(response["JobNames"])
        return [e for e in result if "jobs_xyz" in e]

    @handle_iterable_input
    def job_status(self, job_name):
        paginator = self.glue_client.get_paginator('get_job_runs')
        response = paginator.paginate(JobName=job_name)
        return response

glue_client = GlueClient()
jobs = glue_client.jobs_list()
jobs_status = glue_client.job_status(jobs)
print(jobs)
This is the most basic way to make your decorator handle methods properly, by explicitly handling the passing of self. Note, it assumes the function being decorated will only take a single argument.
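If the decorated method ever needs more than that one argument, a slightly more general variant (my own sketch, not from the answer) forwards any extra arguments and uses functools.wraps to preserve the method's metadata:

from functools import wraps

def handle_iterable_input(func):
    @wraps(func)  # preserve the wrapped method's name and docstring
    def inner(self, list_of_val, *args, **kwargs):
        # Call the original method once per element, forwarding extra arguments.
        return [func(self, value, *args, **kwargs) for value in list_of_val]
    return inner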
If all you want to do is make job_status iterate through a list of job names instead of operating on just one, something like this should work:
def jobs_status(self, job_names):
    paginator = self.glue_client.get_paginator('get_job_runs')
    return [paginator.paginate(JobName=job_name) for job_name in job_names]
Using a decorator to change what parameters a method expects seems like a bad idea.
Also, naming your class GlueClient would imply that it is a glue client. The fact that it has an attribute named glue_client makes me suspect you could probably choose a clearer name for one or both of them. (However, I'm not familiar with the package you're using.)

Templated object generation in python

What is a good design pattern to implement templated object generation (not sure that's the name) in Python?
By that, I mean having a function such as:
from typing import Callable, TypeVar

T = TypeVar('T')

def mk_templated_obj_factory(template: T) -> Callable[..., T]:
    """Returns an f(**kwargs) function that returns an object of type T created from a template of the same type."""
Python has templated strings. Something like `"this {is} a {template}".format` would be how one could achieve the above. If we want a "proper" function that has a signature (useful for a user so they know what arguments they need to provide!), we could do this:
import string
from functools import partial
from inspect import signature, Signature, Parameter
from operator import itemgetter
from typing import Callable

# plain version: works, but has no useful signature
f = "hello {name} how are you {verb}?".format

def templated_string_func(template: str) -> Callable:
    """A function making templated strings. Like template.format, but with a signature."""
    f = partial(str.format, template)
    names = filter(None, map(itemgetter(1), string.Formatter().parse(template)))
    params = [Parameter(name=name, kind=Parameter.KEYWORD_ONLY) for name in names]
    f.__signature__ = Signature(params)
    return f

f = templated_string_func("hello {name} how are you {verb}?")
assert f(name='Christian', verb='doing') == 'hello Christian how are you doing?'
assert str(signature(f)) == '(*, name, verb)'
But what if we want to make dict factories? Something having this behavior:
g = templated_dict_func(template={'hello': '$name', 'how are you': ['$verb', 2]})
assert g(name='Christian', verb='doing') == {'hello': 'Christian', 'how are you': ['doing', 2]}
What about other types of objects?
It seems like something that would have a solid design pattern...
I would recommend using decorators to register your template-function-generating functions in a dictionary that maps each type to the function that handles it. The dictionary is needed in order to template objects of any type in an extensible way, without writing all the templating logic in a single big function; handling logic for new types can be added as needed.
The core code is in the Templater class, just grouped here for organisation:
class Templater:
    templater_registry: dict[type, Callable[[Any], TemplateFunc]] = {}

    @classmethod
    def register(cls, handles_type: type):
        def decorator(f):
            cls.templater_registry[handles_type] = f
            return f
        return decorator
    ...
Where TemplateFunc is defined as Generator[str, None, Callable[..., T]]: a generator that yields strs and returns a function producing some type T. This is chosen so that the template handlers can yield the names of their keyword arguments and then return their template function. The Templater.template_func method uses something of type TemplateFunc to generate a function with the correct signature.
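To make the protocol concrete, here is a toy example of my own (not part of the answer's code) showing how such a generator yields its argument names and then delivers the template function via StopIteration:

def demo():
    yield 'name'  # advertise the keyword argument this template needs
    return lambda **kw: 'hello {}'.format(kw['name'])

gen = demo()
print(next(gen))  # 'name'
try:
    next(gen)
except StopIteration as stop:
    template_func = stop.value  # a generator's return value rides on StopIteration
print(template_func(name='Christian'))  # hello Christian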
The register decorator presented above is written such that:
@Templater.register(dict)
def templated_dict_func(template: dict[K, V]):
    pass
is equivalent to:
def templated_dict_func(template: dict[K, V]):
    pass

Templater.templater_registry[dict] = templated_dict_func
The code for templating any type is fairly self-explanatory:
class Templater:
    ...

    @classmethod
    def template_func_generator(cls, template: T) -> TemplateFunc[T]:
        # if it is a type that can be a template
        if type(template) in cls.templater_registry:
            # then return the template handler
            template_factory = cls.templater_registry[type(template)]
            return template_factory(template)
        else:
            # else: an empty generator that returns a function that returns
            # the template unchanged, since we don't know how to handle it
            def just_return():
                return lambda: template
                yield  # this yield is needed to tell Python that this is a generator
            return just_return()
The code for templating strings is fairly unchanged, except that the argument names are yielded instead of put in the function signature:
@Templater.register(str)
def templated_string_func(template: str) -> TemplateFunc[str]:
    """A function making templated strings. Like template.format, but with a signature."""
    f = partial(str.format, template)
    yield from filter(None, map(itemgetter(1), string.Formatter().parse(template)))
    return f
The list template function could look like this:
@Templater.register(list)
def templated_list_func(template: list[T]) -> TemplateFunc[list[T]]:
    entries = []
    for item in template:
        item_template_func = yield from Templater.template_func_generator(item)
        entries.append(item_template_func)

    def template_func(**kwargs):
        return [
            item_template_func(**kwargs)
            for item_template_func in entries
        ]
    return template_func
Although, if you cannot guarantee that every template function can handle extra arguments, you need to track which arguments belong to which elements and only pass the necessary ones. I use the get_generator_return utility function (defined later on) to capture both the yielded values and the return value of the recursive calls.
@Templater.register(list)
def templated_list_func(template: list[T]) -> TemplateFunc[list[T]]:
    entries = []
    for item in template:
        params, item_template_func = get_generator_return(Templater.template_func_generator(item))
        params = tuple(params)
        yield from params
        entries.append((item_template_func, params))

    def template_func(**kwargs):
        return [
            item_template_func(**{arg: kwargs[arg] for arg in args})
            for item_template_func, args in entries
        ]
    return template_func
The dict handler is implemented similarly. This system could be extended to support all kinds of different objects, including arbitrary dataclasses and more, but I leave that as an exercise for the reader! (A sketch of a dataclass handler follows the full example below.)
Here is the entire working example:
import string
from functools import partial
from inspect import Signature, Parameter
from operator import itemgetter
from typing import Callable, Any, TypeVar, Generator, Tuple, Dict, List
from collections import namedtuple

T = TypeVar('T')
U = TypeVar('U')

def get_generator_return(gen: Generator[T, Any, U]) -> Tuple[Generator[T, Any, U], U]:
    return_value = None

    def inner():
        nonlocal return_value
        return_value = yield from gen
    gen_items = list(inner())

    def new_gen():
        yield from gen_items
        return return_value
    return new_gen(), return_value

# TemplateFunc: TypeAlias = Generator[str, None, Callable[..., T]]
TemplateFunc = Generator[str, None, Callable[..., T]]

class Templater:
    templater_registry: Dict[type, Callable[[Any], TemplateFunc]] = {}

    @classmethod
    def register(cls, handles_type: type):
        def decorator(f):
            cls.templater_registry[handles_type] = f
            return f
        return decorator

    @classmethod
    def template_func_generator(cls, template: T) -> TemplateFunc[T]:
        if type(template) in cls.templater_registry:
            template_factory = cls.templater_registry[type(template)]
            return template_factory(template)
        else:
            # an empty generator that returns a function that returns the
            # template unchanged, since we don't know how to handle it
            def just_return():
                return lambda: template
                yield  # this yield is needed to tell Python that this is a generator
            return just_return()

    @classmethod
    def template_func(cls, template: T) -> Callable[..., T]:
        gen = cls.template_func_generator(template)
        params, f = get_generator_return(gen)
        f.__signature__ = Signature(Parameter(name=param, kind=Parameter.KEYWORD_ONLY) for param in params)
        return f

@Templater.register(str)
def templated_string_func(template: str) -> TemplateFunc[str]:
    """A function making templated strings. Like template.format, but with a signature."""
    f = partial(str.format, template)
    yield from filter(None, map(itemgetter(1), string.Formatter().parse(template)))
    return f

K = TypeVar('K')
V = TypeVar('V')

@Templater.register(dict)
def templated_dict_func(template: Dict[K, V]) -> TemplateFunc[Dict[K, V]]:
    DictEntryInfo = namedtuple('DictEntryInfo', ['key_func', 'value_func', 'key_args', 'value_args'])
    entries: List[DictEntryInfo] = []
    for key, value in template.items():
        key_params, key_template_func = get_generator_return(Templater.template_func_generator(key))
        value_params, value_template_func = get_generator_return(Templater.template_func_generator(value))
        key_params = tuple(key_params)
        value_params = tuple(value_params)
        yield from key_params
        yield from value_params
        entries.append(DictEntryInfo(key_template_func, value_template_func, key_params, value_params))

    def template_func(**kwargs):
        return {
            entry_info.key_func(**{arg: kwargs[arg] for arg in entry_info.key_args}):
                entry_info.value_func(**{arg: kwargs[arg] for arg in entry_info.value_args})
            for entry_info in entries
        }
    return template_func

@Templater.register(list)
def templated_list_func(template: List[T]) -> TemplateFunc[List[T]]:
    entries = []
    for item in template:
        params, item_template_func = get_generator_return(Templater.template_func_generator(item))
        params = tuple(params)
        yield from params
        entries.append((item_template_func, params))

    def template_func(**kwargs):
        return [
            item_template_func(**{arg: kwargs[arg] for arg in args})
            for item_template_func, args in entries
        ]
    return template_func

g = Templater.template_func(template={'hello': '{name}', 'how are you': ['{verb}', 2]})
assert g(name='Christian', verb='doing') == {'hello': 'Christian', 'how are you': ['doing', 2]}
print(g.__signature__)
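And as a sketch of the "exercise for the reader" mentioned above: a handler for dataclasses can reuse the same machinery by recursing into each field's value. This is my own illustrative extension, not part of the original answer, and register_dataclass is a hypothetical helper name:

from dataclasses import dataclass, fields, replace

def register_dataclass(cls_type):
    """Hypothetical helper: register one specific dataclass type with Templater."""
    @Templater.register(cls_type)
    def templated_dataclass_func(template) -> TemplateFunc:
        entries = []
        for field in fields(template):
            value = getattr(template, field.name)
            params, value_func = get_generator_return(Templater.template_func_generator(value))
            params = tuple(params)
            yield from params
            entries.append((field.name, value_func, params))

        def template_func(**kwargs):
            # Build a new instance with every templated field filled in.
            return replace(template, **{
                name: value_func(**{arg: kwargs[arg] for arg in args})
                for name, value_func, args in entries
            })
        return template_func
    return templated_dataclass_func

@dataclass
class Greeting:
    text: str

register_dataclass(Greeting)
h = Templater.template_func(Greeting(text='hello {name}'))
assert h(name='Christian') == Greeting(text='hello Christian')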

Python client authentication with decorators

I want to build a Python client on top of a REST API that authenticates with an API token; hence, all API calls require the token. It is pretty ugly to add a field
token='...'
to each function call, e.g.
a = f1(5, token='token')
b = f2(6, 12, token='token')
c = f3(2, 'a', token='token')
where internally f1 and f2 delegate to the REST API. What I would like to have is something like:
auth = authenticate('token')
a = f1(5)
b = f2(6, 12)
c = f3(2, 'a')
What I can do is to create a class and make all functions member functions. Hence, we would have:
auth = calculator('token')
a = auth.f1(5)
b = auth.f2(6, 12)
c = auth.f3(2, 'a')
but that would also be somewhat ugly. I am trying to get this to work with decorators, but to no avail so far.
class authenticate:
    def __init__(self, token):
        self.token = token

    def __call__(self, func):
        def functor(*args, **kwargs):
            return func(*args, **kwargs, key=self.token)
        return functor

@authenticate
def f1(a, key):
    data = a
    result = requests.get(1, data, key)
    return result
However, this seems to be going nowhere. I am also wondering whether this can work at all, as decorators are executed at import time while the token is only provided at runtime.
Any suggestions on how to make this work, or does anyone know of another standard pattern for this?
So after some hacking around we came up with the following:
class authenticate:
    # start with an empty key
    key = None

    @classmethod
    def set_key(cls, token):
        """Add the token."""
        cls.key = token

    def __init__(self, func=None):
        if func is not None:
            self.func = func
        else:
            print('no function')

    def __call__(self, *args):
        """Add authentication to the function func."""
        ret = self.func(*args, key=self.key)
        return ret

@authenticate
def f1(a, key):
    data = a
    result = requests.get(1, data, key)
    return result
Then you can run code like:
authentication_key = 'token'
print('Initiate class')
authenticate.set_key(token=authentication_key)
print('Run f1(5)')
a1 = f1(5)  # no token needed!
a2 = f2(6, 12)  # again no token needed, assuming f2 is decorated the same way
print(a1)
This works more or less as I hoped and I find it cleaner than the class methods. If anyone has a better suggestion or improvements let me know.
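For comparison, here is a minimal function-based variant of the same idea (my own sketch, not from the original answer): the token lives in module-level state, and functools.wraps keeps the wrapped function's metadata intact:

from functools import wraps

_token = None  # module-level state, set once at runtime

def set_token(token):
    global _token
    _token = token

def authenticated(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        # The token is looked up at call time, not at import time,
        # so decorating before set_token() is called is fine.
        return func(*args, key=_token, **kwargs)
    return wrapper

@authenticated
def f1(a, key):
    return (a, key)

set_token('token')
assert f1(5) == (5, 'token')

Because the token is read inside the wrapper, this also answers the import-time concern from the question: the decoration happens at import, but the token lookup does not.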

Converting Nested Json into Python object

I have nested json as below
{
    "product": "name",
    "protocol": "scp",
    "read_logs": {
        "log_type": "failure",
        "log_url": "http://url"
    }
}
I am trying to create a Python class object with the code below.
import json

class Config(object):
    """
    Argument: JSON object from the configuration file.
    """
    def __init__(self, attrs):
        if 'log_type' in attrs:
            self.log_type = attrs['log_type']
            self.log_url = attrs['log_url']
        else:
            self.product = attrs["product"]
            self.protocol = attrs["protocol"]

    def __str__(self):
        return "%s;%s" % (self.product, self.log_type)

    def get_product(self):
        return self.product

    def get_logurl(self):
        return self.log_url

class ConfigLoader(object):
    '''
    Create a configuration loader which can read JSON config files
    '''
    def load_config(self, attrs):
        with open(attrs) as data_file:
            config = json.load(data_file, object_hook=load_json)
        return config

def load_json(json_object):
    return Config(json_object)

loader = ConfigLoader()
config = loader.load_config('../config/product_config.json')
print(config.get_product())
But the object_hook invokes load_json recursively, so Config.__init__ is called twice, and the final object that I create does not contain the nested JSON data.
Is there any way to read the entire nested JSON object into a single Python class?
Thanks
A variation on Pankaj Singhal's idea, but using a "generic" namespace class instead of namedtuples:
import json

class Generic:
    @classmethod
    def from_dict(cls, dict):
        obj = cls()
        obj.__dict__.update(dict)
        return obj

data = '{"product": "name", "read_logs": {"log_type": "failure", "log_url": "123"}}'
x = json.loads(data, object_hook=Generic.from_dict)
print(x.product, x.read_logs.log_type, x.read_logs.log_url)
namedtuple & object_hook can help create a one-liner:

# Create an object with attributes corresponding to JSON keys.
def json_to_obj(data): return json.loads(data, object_hook=lambda converted_dict: namedtuple('X', converted_dict.keys())(*converted_dict.values()))

Or create a more readable function like below:

def _object_hook(converted_dict): return namedtuple('X', converted_dict.keys())(*converted_dict.values())
def json_to_obj(data): return json.loads(data, object_hook=_object_hook)

Below is a code snippet using it:

import json
from collections import namedtuple

data = '{"product": "name", "read_logs": {"log_type": "failure", "log_url": "http://url"}}'
x = json_to_obj(data)
print(x.product, x.read_logs.log_type, x.read_logs.log_url)
NOTE: Check out namedtuple's rename parameter.
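The rename parameter matters because JSON keys are not always valid Python identifiers; with rename=True, namedtuple replaces invalid field names with positional ones instead of raising an error. A small illustration (not from the original answer):

from collections import namedtuple

# 'class' is a keyword and 'log-url' contains a dash, so neither is a valid field name.
X = namedtuple('X', ['class', 'log-url', 'product'], rename=True)
print(X._fields)  # ('_0', '_1', 'product')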
I wrote a simple DFS algorithm to do this job. It converts a nested item into a flat dictionary; in my case, I joined the keys of the JSON item with a dash.
For example, the nested item { "a": [{"b": "c"}, {"d": "e"}] } will be transformed into {'a-0-b': 'c', 'a-1-d': 'e'}.
def DFS(item, headItem, heads, values):
    if isinstance(item, dict):
        for k in item.keys():
            DFS(item[k], headItem + [k], heads, values)
    elif isinstance(item, list):
        for i in range(len(item)):
            DFS(item[i], headItem + [str(i)], heads, values)
    else:
        headItemStr = '-'.join(headItem)
        heads.append(headItemStr)
        values.append(item)
    return

def reduce(jsonItem):
    heads, values = [], []
    DFS(jsonItem, [], heads, values)
    return heads, values

def json2dict(jsonItem):
    head, value = reduce(jsonItem)
    dictHeadValue = {head[i]: value[i] for i in range(len(head))}
    return dictHeadValue
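A quick check of the flattening, matching the example above:

nested = {"a": [{"b": "c"}, {"d": "e"}]}
print(json2dict(nested))  # {'a-0-b': 'c', 'a-1-d': 'e'}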

Lazy parameter binding in Python

I tried to design a workflow using the composite pattern; the sample code looks like this:
import os

class CommandInterface(object):
    def __init__(self, name, template=None, tool=None, param: dict = {}, input=None, output=None):
        self.name = name
        self.input = input
        self.output = output
        self.param = param
        self.template = template

    def before_invoke(self):
        pass  # check before invoke

    def invoke(self):
        pass

    def after_invoke(self):
        pass  # check after invoke

class ShellCommand(CommandInterface):
    def render(self):
        cmd = self.template.format(input=self.input,
                                   output=self.output,
                                   param=self.param,
                                   )
        return cmd

    def before_invoke(self):
        pass

    def invoke(self):
        os.system(self.render())

class Workflow(CommandInterface):
    def __init__(self, name):
        self.name = name
        self._input = []
        self._output = []
        self._commands = []
        self._tool = []

    def add(self, command: CommandInterface):
        self._commands.append(command)
        return self

    def invoke(self):
        for command in self._commands:
            command.invoke()
And it can be used like this:
workflow = Workflow("example")
raw_files = ["file1", "file2", "file3"]
for i in raw_files:
    head_command = ShellCommand("head", template="head {input} {output}",
                                input=i,
                                output=i + "_head")
    workflow.add(head_command)

merge_command = ShellCommand("merge", template="cat {input} > {output}",
                             input=" ".join([i + "_head" for i in raw_files]),
                             output="merged")
workflow.add(merge_command)
workflow.invoke()
However, sometimes a command's input might be another command's result, for example:
class PythonCommand(CommandInterface):
    def invoke(self):
        self._result = self.template(input=self.input, output=self.output, param=self.param)

def a_command(input, output, param):
    take_long_time(input, output, param)
    return {"a_result": "some result that will be used in b_command"}

def b_command(input, output, param):
    def need_the_result_of_a_command(input, output, param):
        return param["a_result"]
    need_the_result_of_a_command(input, output, param)
    return "success"

a = PythonCommand("a", template=a_command, input=input_, output=output_, param=param_)
a.invoke()
b = PythonCommand("b", template=b_command, input=input_, output=output_, param=a._result)
b.invoke()
I can't use a workflow to compose these 2 commands because b uses a's result as a parameter, which is only visible after a is invoked. However, a workflow manager like the code before is still necessary in some situations. I think I need something like this:
workflow = Workflow("example")
a = PythonCommand("a", template=a_command, input=input_, output=output_, param=param_)
workflow.add(a)
b = PythonCommand("b", template=b_command, input=input_, output=output_, param={"a_result": a.lazy()._result})
workflow.add(b)
workflow.invoke()
Does anyone have ideas about how to design a workflow with .lazy() implementation like this?
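One possible approach, as a minimal sketch of my own (not an established API): let .lazy() return a proxy that records which attribute is wanted, and have the workflow resolve any proxies in a command's param just before invoking that command. This assumes param is a flat dict and that commands run in dependency order:

class LazyAttr:
    """Placeholder for an attribute of `command` that only exists after invoke()."""
    def __init__(self, command):
        self._command = command
        self._attr = None

    def __getattr__(self, name):
        # Called only for attributes that don't exist yet; remember the name.
        self._attr = name
        return self

    def resolve(self):
        return getattr(self._command, self._attr)

# added to CommandInterface so every command can produce a placeholder
def lazy(self):
    return LazyAttr(self)
CommandInterface.lazy = lazy

class LazyWorkflow(Workflow):
    def invoke(self):
        for command in self._commands:
            # By the time we get here, earlier commands have already run,
            # so their results exist and the placeholders can be resolved.
            command.param = {
                k: (v.resolve() if isinstance(v, LazyAttr) else v)
                for k, v in command.param.items()
            }
            command.invoke()

With this, param={"a_result": a.lazy()._result} holds a LazyAttr placeholder until LazyWorkflow.invoke() reaches b, by which point a has already populated _result.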
