how to compose two or more functions in python - python

I have these two functions:
def swap_joker1(afile):
    """Move the joker (27) one slot to the right, swapping with the first
    element when the joker is already last. Mutates and returns *afile*."""
    pos = afile.index(27)
    # Wrap around from the last slot to index 0; otherwise swap with the neighbour.
    target = 0 if pos == len(afile) - 1 else pos + 1
    afile[pos], afile[target] = afile[target], afile[pos]
    return afile
def swap_joker2(afile):
    """Move the joker (28) two slots to the right by rotating it past the
    next two cards. Mutates *afile* in place and returns it.
    """
    # BUG FIX: the original read `afile1.index(28)` — `afile1` is undefined,
    # so the function always raised NameError.
    idx = afile.index(28)
    afile[idx], afile[idx + 1], afile[idx + 2] = afile[idx + 1], afile[idx + 2], afile[idx]
    return afile
How can I compose them together into a new function called cipher_functions?

Well, you can create your own cute function composition function:
import functools

def compose(*functions):
    """Compose *functions* right-to-left: compose(f, g)(x) == f(g(x))."""
    return functools.reduce(lambda f, g: lambda x: f(g(x)), functions)

def foo(var):
    """Halve *var* using integer division."""
    return var // 2

def bar(var):
    """Add one to *var*."""
    return var + 1

if __name__ == '__main__':
    # compose(bar, foo)(x) == bar(foo(x)): foo runs first, then bar.
    # (The original comment stated the order backwards.)
    composition = compose(bar, foo)
    # BUG FIX: `print composition(6)` is Python 2 syntax; the rest of the
    # document targets Python 3 (it uses f-strings), so call print().
    print(composition(6))
You can apply the functions in whatever way you like, and in as many ways as you like. This answer was made possible, with the help of this website.

A few methods:
# Option 1: a lambda that runs swap_joker1 first, then swap_joker2.
cipher_functions = lambda afile:swap_joker2(swap_joker1(afile))
# Option 2: the same composition as a named function.
def cipher_functions(afile):
    """Apply swap_joker1, then swap_joker2, to *afile*."""
    return swap_joker2(swap_joker1(afile))
import functional #third party, not maintained. Alternatives exist
# Option 3: NOTE(review): verify functional.compose's argument order against
# the library's documentation before relying on which joker swap runs first.
cipher_functions = functional.compose(swap_joker1, swap_joker2)

It would be great if Python offered a composition operator. Unfortunately, you need to do it yourself.
def cipher_functions(afile):
    """Apply swap_joker2 first, then swap_joker1."""
    # This is f(g(x)); swap for g(f(x)) if necessary
    return swap_joker1(swap_joker2(afile))
You can define a composition function easily:
def compose(f, g):
    """Return the composition x -> f(g(x)); all arguments are forwarded to g."""
    return lambda *args, **kwargs: f(g(*args, **kwargs))
# swap_joker2 runs first, then swap_joker1 is applied to its result.
cipher_functions = compose(swap_joker1, swap_joker2)

Your swap_joker1 function returns either afile or None, which makes composition a bit hard.
Assuming it was actually:
def swap_joker1(afile):
    """Swap the joker (27) with the card after it, or with the first card
    when the joker sits at the end. Returns the mutated *afile*."""
    i = afile.index(27)
    if i != len(afile) - 1:
        # Common case: exchange with the immediate right-hand neighbour.
        afile[i], afile[i + 1] = afile[i + 1], afile[i]
    else:
        # Joker is last: wrap around and exchange with the head of the list.
        afile[i], afile[0] = afile[0], afile[i]
    return afile
Which returns afile unconditionally, you can simply write the composition as:
def cipher_functions(afile):
    """Run swap_joker1 first, then swap_joker2, returning the final deck."""
    after_first_swap = swap_joker1(afile)
    return swap_joker2(after_first_swap)
And more generally:
def compose(f, g):
    """Return x -> (f o g)(x) = f(g(x))."""
    def fog(*args, **kwargs):
        # Evaluate g first, then feed its result to f.
        return f(g(*args, **kwargs))
    return fog

# swap_joker1 is applied first, then swap_joker2 runs on its result.
cipher_functions = compose(swap_joker2, swap_joker1)

Related

How to wrap a function with accurate __code__.co_argcount?

Here is my problem, i'm working with an API,
precisely with a high-order function that only accepts functions with N arguments. (I cannot monkey-patch this API).
#this is an example of a high order function i may encounter
#there are many more such functions in the API that require a specific number of arguments
#this example fct required 3 arg, but a valid solution should adapt to any required args count
def high_order_function(f):
    """high order function expecting a function with 3 arguments!"""
    print(f"\nprocessing function {f.__name__}")
    # Accept only callables whose code object reports exactly 3 positional args.
    if f.__code__.co_argcount == 3:
        print("Function is Ok")
        #...
        return None
    raise Exception(f"Error Expecting a function with 3 arguments, the passed function got {f.__code__.co_argcount}")
And my problem is that I simply cannot use any wrapper because of this check.
what am I supposed to do ?
def my_wrapper(func):
    """Return a wrapper that brackets each call to *func* with log prints."""
    import functools
    #functools.wraps(func)  NOTE(review): presumably meant as @functools.wraps
    def inner(*args, **kwargs):
        print("wrapped1!")
        result = func(*args, **kwargs)
        print("wrapped2!")
        return result
    return inner
def original(a, b, c):
    """Plain 3-argument function used to demonstrate the argcount check."""
    return None
# The wrapper's signature is (*args, **kwargs), so its co_argcount is 0.
wrapped = my_wrapper(original)
high_order_function(original)
#ok!
high_order_function(wrapped)
#will cause error
#because wrapped.__code__.co_argcount == 0 and is readonly!
After a lot of tinkering, I found a pretty procedural way that might work for you.
The trick was to use __code__.replace(). There are some caveats, probably more than I know.
def high_order_function(f):
    """high order function expecting a function with 3 arguments!"""
    print(f"\nprocessing function {f.__name__}")
    # Rejects any callable whose code object does not report exactly 3
    # positional parameters.
    if f.__code__.co_argcount!=3:
        raise Exception(f"Error Expecting a function with 3 arguments, the passed function got {f.__code__.co_argcount}")
    print("Function is Ok")
    #...
    return None
def my_wrapper(func):
    """Wrap *func* while making the wrapper's __code__.co_argcount match
    func's own, via __code__.replace() (CPython 3.8+ only)."""
    import functools
    #functools.wraps(func)
    def inner(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z):
        # locals() here contains the 26 placeholder parameters plus the
        # closed-over `func`, which must be removed before forwarding.
        kwargs = locals().copy()
        del kwargs["func"]
        print("wrapped1!")
        r = func(**kwargs) # func(*kwargs.values()) would work too
        print("wrapped2!")
        return r
    # Rewrite inner's code object so introspection reports func's own
    # parameter names and count. NOTE(review): this relies on CPython code
    # object internals — see the caveats below (*args/**kwargs fail, and
    # __code__.replace exists only on 3.8+).
    func_args = func.__code__.co_varnames
    inner.__code__ = inner.__code__.replace(co_varnames=func_args, co_argcount=len(func_args))
    return inner
def original(a, b, c):
    """Demo 3-argument function."""
    return None
wrapped = my_wrapper(original)
# Both calls now pass: the wrapper reports co_argcount == 3.
high_order_function(original)
high_order_function(wrapped)
Result
processing function original
Function is Ok
processing function original
Function is Ok
functools.wraps changes the name of inner to original
Caveats
__code__.replace() raised ValueError: code: varnames is too small when inner's parameters were *args or **kwargs
If inner instead had no parameters then locals() inside it would not get the supplied values, therefore you got the whole alphabet instead
Inside inner you can access the parameters by the letter like normal if you're sure it's supplied; otherwise you'll get IndexError: tuple index out of range
I recommend to use e.g. kwargs.get("d") instead
__code__.replace may only be for 3.8+, it has sys.version_info >= (3, 8) in the source code
Why not define two versions of the inner function in your wrapper, based on whether you need to pass the wrapped function to higher_order_function or not?
Something like this:
def high_order_function(f):
    """high order function expecting a function with 3 arguments!"""
    print(f"\nprocessing function {f.__name__}")
    # Only callables reporting exactly 3 positional parameters are accepted.
    if f.__code__.co_argcount != 3:
        raise Exception(
            f"Error Expecting a function with 3 arguments, the passed function got {f.__code__.co_argcount}")
    print("Function is Ok")
    # ...
    return None
def my_wrapper(func, higher_order_compatible=True): # switch to control whether should be compatible with the higher_order_function or not
    """Wrap *func* with logging; optionally expose three named positional
    parameters so a __code__.co_argcount == 3 check still passes."""
    import functools
    if higher_order_compatible:
        #functools.wraps(func)
        def inner(a, b, c, *args, **kwargs):
            # Three explicit parameters satisfy the argument-count check.
            print("wrapped1!")
            result = func(a, b, c, *args, **kwargs)
            print("wrapped2!")
            return result
        return inner
    #functools.wraps(func)
    def inner(*args, **kwargs):
        # Plain pass-through wrapper (co_argcount == 0).
        print("wrapped1!")
        result = func(*args, **kwargs)
        print("wrapped2!")
        return result
    return inner
def original(a, b, c):
    """Demo 3-argument function."""
    return None
# One wrapper that satisfies the 3-argument check, one that does not.
wrapped_compat = my_wrapper(original, higher_order_compatible=True)
wrapped_nocompat = my_wrapper(original, higher_order_compatible=False)
print("Original")
high_order_function(original)
print("Compatible")
high_order_function(wrapped_compat)
print("Not compatible")
try:
    high_order_function(wrapped_nocompat)
except:
    # NOTE(review): a bare except keeps the demo alive but would also
    # swallow unrelated errors.
    print("no not working")
Results in:
Original
processing function original
Function is Ok
Compatible
processing function original
Function is Ok
Not compatible
processing function original
no not working
co_argcount: number of arguments (not including keyword only arguments, * or ** args)
Hence the goal is to bypass that definition. Make a fake signature with 3 fake parameters; these are counted by the code attribute co_argcount. The parameters of the original function must then be passed as keyword arguments.
def wrapper(f):
    """Expose three dummy positional parameters (counted by co_argcount)
    while forwarding only the keyword arguments to *f*."""
    def extended_signature(fake1=None, fake2=None, fake3=None, **kwargs):
        # fake1..fake3 exist solely to satisfy the argument-count check.
        result = f(**kwargs)
        return result
    return extended_signature
def a(q, w): print(q, w)
a_wrapped = wrapper(a)
# NOTE(review): high_order_function as shown above returns None, so chaining
# a call onto its result raises TypeError; presumably the intent was
# high_order_function(a_wrapped) followed by a_wrapped(q=1, w=2).
high_order_function(a_wrapped)(q=1, w=2)
#processing function true_signature
#Function is Ok
This is an attempt to solve the problem
i feel like it's almost a potential solution
However, it is not working as expected: calling f() on a function object does not go through an instance-level f.__call__ attribute.
def my_wrapper(func):
    """Attempted wrapper that copies the function object and swaps its
    __call__ — see the NOTE below for why this does not actually wrap."""
    import copy
    def inner(*args, **kwargs):
        print("wrapped front")
        r = func(*args, **kwargs)
        print("wrapped end")
        return r
    # NOTE(review): as the surrounding text observes, the wrap "did not
    # work" — calling a function goes through type(f).__call__, not an
    # instance attribute, so the __call__ assignment below has no effect;
    # deepcopy of a function also appears not to produce an independent
    # copy here (confirm against the copy-module docs).
    newfunc = copy.deepcopy(func)
    newfunc.__name__ = func.__name__ + "_wrapped"
    newfunc.__call__ = inner
    return newfunc
# c has a default but still counts toward co_argcount, so this reports 3.
def original(a, b, c=6):
    print("original",a,b,c)
    return None
###testing if the original function work
high_order_function(original)
#will pass requirement
###testing if the wrap works?
high_order_function(my_wrapper(original))
#will pass requirement, however the wrap did not work
well, here's a solution, couldn't find a procedural way to generate the functions... it scales up to 5 forced arguments
def my_wrapper(func):
    """Wrap *func* with "wrapped1!"/"wrapped2!" logging while keeping an
    accurate __code__.co_argcount (the number of required positional
    parameters of *func*).

    The original listed six hand-written variants covering 0-5 forced
    arguments and raised for more; this version generates the matching
    signature procedurally with exec(), behaving identically for 0-5 and
    additionally supporting up to 26 forced arguments.
    """
    import string
    # Required positional parameters = co_argcount minus defaulted ones;
    # these must stay real parameters so the wrapper's co_argcount matches.
    if func.__defaults__ is not None:
        force_arg = func.__code__.co_argcount - len(func.__defaults__)
    else:
        force_arg = func.__code__.co_argcount
    if force_arg > len(string.ascii_lowercase):
        raise Exception("my_wrapper() does not support more than 26 forced arguments")
    # Same parameter names (a, b, c, ...) the hand-written variants used.
    names = list(string.ascii_lowercase[:force_arg])
    params = ", ".join(names + ["**kwargs"])
    # The generated source contains only the letters a..z and fixed text,
    # so exec() here is safe (no external input reaches it).
    source = (
        f"def inner({params}):\n"
        f"    print(\"wrapped1!\")\n"
        f"    r = func({params})\n"
        f"    print(\"wrapped2!\")\n"
        f"    return r\n"
    )
    namespace = {"func": func}
    exec(source, namespace)
    return namespace["inner"]

Python chain several functions into one

I have several string processing functions like:
# Compiled once; matches any single whitespace character.
_WS_PATTERN = re.compile(r'\s')

def func1(s):
    """Delete every whitespace character from *s*."""
    return _WS_PATTERN.sub("", s)

def func2(s):
    """Surround *s* with square brackets."""
    return f"[{s}]"
...
I want to combine them into one pipeline function: my_pipeline(), so that I can use it as an argument, for example:
class Record:
    """Holds a name string and can feed it through an arbitrary callable."""

    def __init__(self, s):
        # The raw string this record wraps.
        self.name = s

    def apply_func(self, func):
        """Return *func* applied to this record's name."""
        return func(self.name)
# Desired usage: my_pipeline is a single callable combining the processing
# functions (built by the answers that follow).
rec = Record(" hell o")
output = rec.apply_func(my_pipeline)
# output = "[hello]"
The goal is to use my_pipeline as an argument, otherwise I need to call these functions one by one.
Thank you.
You can write a simple factory function or class to build a pipeline function:
def pipeline(*functions):
    """Build one function that applies *functions* left to right.

    Example (the original interactive session):
        >>> rec = Record(" hell o")
        >>> rec.apply_func(pipeline(func1, func2))
        '[hello]'
    """
    def _pipeline(arg):
        value = arg
        for step in functions:
            value = step(value)
        return value
    return _pipeline
This is a more refined version written with reference to this using functools.reduce:
from functools import reduce

def pipeline(*functions):
    """reduce-based variant: thread *initial* through *functions* in order."""
    def _run(initial):
        return reduce(lambda acc, step: step(acc), functions, initial)
    return _run
I didn't test it, but according to my intuition, each loop will call the function one more time at the python level, so the performance may not be as good as the loop implementation.
You can just create a function which calls these functions:
def my_pipeline(s):
    """Apply func2 first, then func1 — equivalent to func1(func2(s))."""
    bracketed = func2(s)
    return func1(bracketed)
Using a list of functions (so you can assemble these elsewhere):
import re

def func1(s):
    """Remove all whitespace from *s*."""
    return re.sub(r'\s', "", s)

def func2(s):
    """Wrap *s* in square brackets."""
    return f"[{s}]"

def func3(s):
    """Append a marker to *s*."""
    return s + 'tada '

def callfuncs(s, pipeline):
    """Apply the functions in *pipeline* right-to-left to *s*.

    BUG FIX: the original called pipeline.reverse(), mutating the caller's
    list on every invocation; iterating reversed(pipeline) leaves it intact.
    """
    result = s
    for f in reversed(pipeline):
        result = f(result)
    return result

class Record:
    """Holds a name string and can push it through a function pipeline."""
    def __init__(self, s):
        self.name = s

    def apply_func(self, pipeline):
        # BUG FIX: the original read `s.name`, which raised NameError;
        # the record's own string is self.name.
        return callfuncs(self.name, pipeline)

# calling order func1(func2(func3(s)))
my_pipeline = [func1, func2, func3]
rec = Record(" hell o")
output = rec.apply_func(my_pipeline)

Is there something like the threading macro from Clojure in Python?

In Clojure I can do something like this:
(-> path
clojure.java.io/resource
slurp
read-string)
instead of doing this:
(read-string (slurp (clojure.java.io/resource path)))
This is called threading in Clojure terminology and helps getting rid of a lot of parentheses.
In Python if I try to use functional constructs like map, any, or filter I have to nest them to each other. Is there a construct in Python with which I can do something similar to threading (or piping) in Clojure?
I'm not looking for a fully featured version since there are no macros in Python, I just want to do away with a lot of parentheses when I'm doing functional programming in Python.
Edit: I ended up using toolz, which supports piping.
Here is a simple implementation of #deceze's idea (although, as #Carcigenicate points out, it is at best a partial solution):
from functools import reduce

def apply(x, f):
    """Call *f* on *x*; argument order matches reduce's accumulator-first convention."""
    return f(x)

def thread(*args):
    """Clojure-style ->: thread args[0] through the remaining functions."""
    return reduce(apply, args)
For example:
def f(x): return 2*x+1
def g(x): return x**2
# 5 -> f -> 11 -> g -> 121
thread(5,f,g) #evaluates to 121
I wanted to take this to the extreme and do it all dynamically.
Basically, the below Chain class lets you chain functions together similar to Clojure's -> and ->> macros. It supports both threading into the first and last arguments.
Functions are resolved in this order:
Object method
Local defined variable
Built-in variable
The code:
class Chain(object):
    """Fluent wrapper emulating Clojure's -> / ->> threading macros.

    index=0 threads the current value in as the first argument of each
    chained call; index=-1 threads it in as the last argument.
    """
    def __init__(self, value, index=0):
        self.value = value
        self.index = index
    def __getattr__(self, item):
        # Resolution order: attribute/method of the wrapped value, then a
        # "local" variable, then a builtin.
        append_arg = True
        try:
            prop = getattr(self.value, item)
            # Bound methods already receive the value as self, so the
            # current value must not be threaded in again.
            append_arg = False
        except AttributeError:
            try:
                # NOTE(review): locals() here is __getattr__'s own scope, so
                # this can never see the caller's variables — presumably the
                # caller's namespace was intended; confirm before relying on it.
                prop = locals()[item]
            except KeyError:
                prop = getattr(__builtins__, item)
        if callable(prop):
            def fn(*args, **kwargs):
                orig = list(args)
                if append_arg:
                    # Thread the current value into the argument list at the
                    # configured position (first or last).
                    if self.index == -1:
                        orig.append(self.value)
                    else:
                        orig.insert(self.index, self.value)
                return Chain(prop(*orig, **kwargs), index=self.index)
            return fn
        else:
            # Non-callable attribute: keep chaining on its value.
            return Chain(prop, index=self.index)
Thread each result as first arg
# Threads the filename through open() and readlines(); .value unwraps the result.
file = Chain(__file__).open('r').readlines().value
Thread each result as last arg
# Threads each result in as the LAST argument (index=-1): sum of squares of 0..99.
result = Chain(range(0, 100), index=-1).map(lambda x: x * x).reduce(lambda x, y: x + y).value

How To make decorator get invoked on a recursive function call?

So here's an extension to this question: https://stackoverflow.com/a/37568895/2290820
on how to optionally Enable or Disable Decorator on a Function.
On those lines, I came up with something like this to make decorator get invoked on a recursive call:
def deco(f):
    """Decorator that replaces f with fattr: calling the decorated name
    stores the argument on f.attr and hands back the original f."""
    def fattr(attr):
        f.attr = attr
        def closure(*args):
            f(*args)
        f.unwrap = f
        f.closure = closure
        return f
    return fattr

@deco  # BUG FIX: rendered as "#deco" in the original, which never applied the decorator.
def printa(x):
    if x > 1:
        # BUG FIX: Python 2 `print x` statements converted to print() calls.
        print(x)
        # NOTE: printa is now fattr, so this recursive call only re-assigns
        # f.attr = x-1 and returns f without printing — exactly the
        # misbehaviour the question describes.
        return printa(x-1)
    else:
        print(x)
        return
printa({1:1})(5)
# do the same call without a decorator (comment typo "deocorator" fixed)
def finta(x):
    """Undecorated twin of printa: counts down from x, printing each value."""
    if x > 1:
        print(x)  # BUG FIX: converted from Python 2 `print x`
        return finta(x-1)
    else:
        print(x)
        return
finta(5) # this works
to experiment with decorators on a recursive function. Clearly, the recursive printa version is not behaving the way it should.
I could do
g = printa({1:1})
g.closure(5)
to turn on the decorator option or not use that option. Anyway, regardless of good or bad design, How can I make decorator get invoked on a recursive call?
In your deco you have an assignment f.attr = attr that "eats" your argument after first recursive call. Your should modify your recursive call this way:
def deco(f):
    """Store the argument of each outer call on f.attr, then hand back f itself."""
    def fattr(attr):
        f.attr = attr
        def closure(*args):
            f(*args)
        f.unwrap = f
        f.closure = closure
        return f
    return fattr

@deco  # BUG FIX: "#deco" in the original never applied the decorator.
def printa(x):
    if x > 1:
        print(x)  # BUG FIX: Python 2 print statements converted to print().
        # printa is fattr after decoration: printa(None) re-binds f.attr and
        # returns f, which is then called with x-1 — keeping recursion alive.
        return printa(None)(x-1) # None will be assigned to f.attr
    else:
        print(x)
        return
printa({1:1})(5)
5
4
3
2
1

Pointfree function combination in Python

I have some predicates, e.g.:
# True when i is a multiple of 13.
is_divisible_by_13 = lambda i: i % 13 == 0
# True when the decimal representation of x reads the same both ways.
is_palindrome = lambda x: str(x) == str(x)[::-1]
and want to logically combine them as in:
filter(lambda x: is_divisible_by_13(x) and is_palindrome(x), range(1000,10000))
The question is now: Can such combination be written in a pointfree style, such as:
filter(is_divisible_by_13 and is_palindrome, range(1000,10000))
This has of course not the desired effect because the truth value of lambda functions is True and and and or are short-circuiting operators. The closest thing I came up with was to define a class P which is a simple predicate container that implements __call__() and has the methods and_() and or_() to combine predicates. The definition of P is as follows:
import copy
class P(object):
    """Callable predicate container; and_/or_ combine in place and return self."""

    def __init__(self, predicate):
        self.pred = predicate

    def __call__(self, obj):
        return self.pred(obj)

    def __copy_pred(self):
        # Snapshot the current predicate so the new lambda closes over the
        # copy, not over the soon-to-be-reassigned self.pred.
        return copy.copy(self.pred)

    def and_(self, predicate):
        """Narrow this P so *predicate* must also hold; returns self for chaining."""
        previous = self.__copy_pred()
        self.pred = lambda x: previous(x) and predicate(x)
        return self

    def or_(self, predicate):
        """Widen this P so *predicate* may hold instead; returns self for chaining."""
        previous = self.__copy_pred()
        self.pred = lambda x: previous(x) or predicate(x)
        return self
With P I can now create a new predicate that is a combination of predicates like this:
P(is_divisible_by_13).and_(is_palindrome)
which is equivalent to the above lambda function. This comes closer to what I'd like to have, but it is also not pointfree (the points are now the predicates itself instead of their arguments). Now the second question is: Is there a better or shorter way (maybe without parentheses and dots) to combine predicates in Python than using classes like P and without using (lambda) functions?
You can override the & (bitwise AND) operator in Python by adding an __and__ method to the P class. You could then write something like:
P(is_divisible_by_13) & P(is_palindrome)
or even
P(is_divisible_by_13) & is_palindrome
Similarly, you can override the | (bitwise OR) operator by adding an __or__ method and the ~ (bitwise negation) operator by adding a __not__ method. Note that you cannot override the built-in and, or and not operator, so this is probably as close to your goal as possible. You still need to have a P instance as the leftmost argument.
For sake of completeness, you may also override the in-place variants (__iand__, __ior__) and the right-side variants (__rand__, __ror__) of these operators.
Code example (untested, feel free to correct):
class P(object):
    """Predicate wrapper combinable pointwise with & (and) and | (or)."""

    def __init__(self, predicate):
        self.pred = predicate

    def __call__(self, obj):
        return self.pred(obj)

    def __copy_pred(self):
        return copy.copy(self.pred)

    def __and__(self, predicate):
        """Return a new P that is true only when both predicates hold."""
        def both(obj):
            return self.pred(obj) and predicate(obj)
        return P(both)

    def __or__(self, predicate):
        """Return a new P that is true when either predicate holds."""
        def either(obj):
            return self.pred(obj) or predicate(obj)
        return P(either)
One more trick to bring you closer to point-free nirvana is the following decorator:
from functools import update_wrapper

def predicate(func):
    """Decorator that constructs a predicate (``P``) instance from
    the given function."""
    wrapped = P(func)
    # Copy __name__, __doc__, etc. from the plain function onto the P instance.
    update_wrapper(wrapped, func)
    return wrapped
You can then tag your predicates with the predicate decorator to make them an instance of P automatically:
#predicate
# NOTE(review): "#predicate" above is how "@predicate" rendered — the
# decorator that wraps each function in a P instance.
def is_divisible_by_13(number):
    """True when *number* is a multiple of 13."""
    return number % 13 == 0
#predicate
def is_palindrome(number):
    """True when the decimal digits of *number* form a palindrome."""
    return str(number) == str(number)[::-1]
>>> pred = (is_divisible_by_13 & is_palindrome)
>>> print [x for x in xrange(1, 1000) if pred(x)]
[494, 585, 676, 767, 858, 949]
Basically, your approach seems to be the only feasible one in Python. There's a python module on github using roughly the same mechanism to implement point-free function composition.
I have not used it, but at a first glance his solution looks a bit nicer (because he uses decorators and operator overloading where you use a class and __call__).
But other than that it's not technically point-free code, it's just "point-hidden" if you will. Which may or may not be enough for you.
You could use the Infix operator recipe:
# Infix (from the ActiveState recipe) lets a binary function be written
# between |...| bars; AND combines two predicates pointwise.
AND = Infix(lambda f, g: (lambda x: f(x) and g(x)))
for n in filter(is_divisible_by_13 |AND| is_palindrome, range(1000,10000)):
    print(n)
yields
1001
2002
3003
4004
5005
6006
7007
8008
9009
Python already has a way of combining two functions: lambda. You can easily make your own compose and multiple compose functions:
from functools import reduce  # BUG FIX: builtin reduce is Python 2 only

# Compose two functions: compose2(f, g)(x) == f(g(x)).
compose2 = lambda f,g: lambda x: f(g(x))
# Variadic composition. BUG FIX: reduce takes (function, iterable); the
# original passed them reversed as reduce(ff, compose2), which raises
# TypeError on any call.
compose = lambda *ff: reduce(compose2,ff)
# NOTE(review): as written this composes the predicates (feeding one's bool
# result into the other) and passes the range INSIDE compose — presumably
# intended: filter(lambda x: is_divisible_by_13(x) and is_palindrome(x), range(1000)).
filter(compose(is_divisible_by_13, is_palindrome, xrange(1000)))
That would be my solution:
class Chainable(object):
    """Wrap a predicate so & and | combine it pointwise with other callables."""

    def __init__(self, function):
        self.function = function

    def __call__(self, *args, **kwargs):
        return self.function(*args, **kwargs)

    def __and__(self, other):
        """New Chainable truthy only when both predicates accept the arguments."""
        return Chainable( lambda *args, **kwargs:
            self.function(*args, **kwargs)
            and other(*args, **kwargs) )

    def __or__(self, other):
        """New Chainable truthy when either predicate accepts the arguments."""
        return Chainable( lambda *args, **kwargs:
            self.function(*args, **kwargs)
            or other(*args, **kwargs) )

def is_divisible_by_13(x):
    return x % 13 == 0

def is_palindrome(x):
    return str(x) == str(x)[::-1]

filtered = filter( Chainable(is_divisible_by_13) & is_palindrome,
    range(0, 100000) )
i = 0
for e in filtered:
    # BUG FIX: the original used Python 2 print statements; end=" " mirrors
    # the trailing-comma spacing, and the bare `print` becomes print().
    print(str(e).rjust(7), end=" ")
    if i % 10 == 9:
        print()
    i += 1
And this is my result:
0 494 585 676 767 858 949 1001 2002 3003
4004 5005 6006 7007 8008 9009 10101 11011 15951 16861
17771 18681 19591 20202 21112 22022 26962 27872 28782 29692
30303 31213 32123 33033 37973 38883 39793 40404 41314 42224
43134 44044 48984 49894 50505 51415 52325 53235 54145 55055
59995 60606 61516 62426 63336 64246 65156 66066 70707 71617
72527 73437 74347 75257 76167 77077 80808 81718 82628 83538
84448 85358 86268 87178 88088 90909 91819 92729 93639 94549
95459 96369 97279 98189 99099

Categories