I'm wondering whether there is a more Pythonic way to call func from inside func_wrap, without modifying func or the func_wrap signature.
class Foo(object):
    a = 1
    b = 2

class Bar(object):
    c = 3

def func_wrap(foo_obj, bar_obj):
    return func(a=foo_obj.a, b=foo_obj.b, c=bar_obj.c)

def func(a, b, c):
    return a + b + c
Something more along the lines of:
def func_wrap(foo_obj, bar_obj):
    return func(**foo_obj.__dict__, **bar_obj.__dict__)
You could use the vars function:
def func_wrap(foo_obj):
    return func(**vars(foo_obj))
which comes down to the same thing.
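Worth noting: vars(obj) is equivalent to obj.__dict__ for ordinary objects, but both only see attributes stored on the instance itself, so the class-level a, b, c above would not be picked up unless they are set on the instance. A minimal sketch assuming the attributes are assigned in __init__:

class Foo(object):
    def __init__(self):
        self.a = 1
        self.b = 2

class Bar(object):
    def __init__(self):
        self.c = 3

def func(a, b, c):
    return a + b + c

def func_wrap(foo_obj, bar_obj):
    # vars(x) is the same mapping as x.__dict__
    return func(**vars(foo_obj), **vars(bar_obj))

print(func_wrap(Foo(), Bar()))  # 6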
I've written code with three different classes that call into each other (class B calls a_calc() from class A; class C calls b_calc() from class B). I want to create a Calculator() that instantiates all of those classes (A, B, C), uses their methods in one line of code, and returns the result of the execution, so that the whole program can be run by creating only one object. Here is a simplified snippet:
class A(object):
    def __init__(self):
        self.a = 10

    def a_calc(self):
        a = self.a + 1
        return a

class B(object):
    def __init__(self, A):
        self.A = A

    def b_calc(self):
        b = self.A.a_calc() + 2
        return b

class C(object):
    def __init__(self, B):
        self.B = B

    def c_calc(self):
        c = self.B().b_calc + 3
        return c

class Calculator(A, B, C):
    def __init__(self):
        result = A.a_calc() + B.b_calc() + C.c_calc()
        print(result)

calc = Calculator()
Yet, I'm getting an error:
Traceback (most recent call last):
  File "/home/taras/PycharmProjects/ResSysPymage/test#2.py", line 31, in <module>
    calc = Calculator()
  File "/home/taras/PycharmProjects/ResSysPymage/test#2.py", line 27, in __init__
    result = A.a_calc() + B.b_calc() + C.c_calc()
TypeError: a_calc() missing 1 required positional argument: 'self'
How can I make my idea come true? I'd be grateful for ideas:)
You seem to be confusing classes with instances of classes, and composition with multiple inheritance.
Perhaps you're looking for something like this:
class A:
    def __init__(self):
        self.a = 10

    def a_calc(self):
        a = self.a + 1
        return a

class B:
    def __init__(self, a: A):
        self.a = a

    def b_calc(self):
        b = self.a.a_calc() + 2
        return b

class C:
    def __init__(self, b: B):
        self.b = b

    def c_calc(self):
        c = self.b.b_calc() + 3
        return c

def calculator():
    a = A()
    b = B(a)
    c = C(b)
    return a.a_calc() + b.b_calc() + c.c_calc()

print(calculator())
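If you would rather keep Calculator as a class, as in your original attempt, composition (rather than multiple inheritance) works too; a minimal sketch built on the classes above:

class Calculator:
    def __init__(self):
        # build the collaborators via composition, not inheritance
        self.a = A()
        self.b = B(self.a)
        self.c = C(self.b)

    def result(self):
        return self.a.a_calc() + self.b.b_calc() + self.c.c_calc()

calc = Calculator()
print(calc.result())  # 11 + 13 + 16 = 40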
I'm trying to find an elegant pythonic solution for the following design issue:
Within a class, I want to use instance attributes as default parameter values.
So, one could do something like:
class A:
    def __init__(self, a=0, b=1):
        self.a = a
        self.b = b

    def func(self, a=None, b=None):
        a = a or self.a
        b = b or self.b
        # do something with a and b
But since this seems like potential boilerplate code, I went ahead and wrote a generic function outside of my class.
def fallback_parameters(obj, **args):
    return [args[arg] or getattr(obj, arg) for arg in args]
and use
def func(self, a=None, b=None):
    a, b = fallback_parameters(self, a=a, b=b)
    # do something with a and b
Is there a better/cleaner way to achieve this? An existing solution?
I'm tempted to create a decorator, so I don't need to type something like:
a, b, c, d, e = fallback_parameters(self, a=a, b=b, c=c, d=d, e=e)
Just for illustration: if you wanted it to change the object's instance attributes (you say you do not), the decorator would look like this:
def fallback_args(func):
    def inner(obj, **kwargs):
        if kwargs:
            for key in kwargs:
                setattr(obj, key, kwargs[key])
        return func(obj, **kwargs)
    return inner

class B:
    def __init__(self, a=0, b=1):
        self.a = a
        self.b = b

    @fallback_args
    def func(self, a=None, b=None):
        print("Do something with {} and {}".format(self.a, self.b))
        # or
        # a = self.a
        # b = self.b
        # print("Do something with {} and {}".format(a, b))

obj2 = B()
print(obj2.a)
print(obj2.b)
obj2.func()
print()

obj2 = B()
print(obj2.a)
print(obj2.b)
obj2.func(a=300)
print()
print(obj2.a)
print(obj2.b)
Outputs:
0
1
Do something with 0 and 1
0
1
Do something with 300 and 1
300
1
But if you do not want to change the object, I don't think it makes sense to avoid explicit lines like a = self.a or a. If you don't list the parameters in the call, how would the decorator know which names in the method's body to fall back on? You could say the names are only the ones listed in the method signature; that can be introspected, but I think that is more convoluted work than this deserves. a = self.a or a looks simple and explicit, so it is Pythonic too, although you could probably write the implicit assignment you want with some more work.
For an idea on how to introspect method args see a somewhat related decorator I wrote here to freeze methods args.
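To make the introspection idea concrete, here is a minimal sketch of such a decorator (an illustration only; fallback_to_attrs is a hypothetical name, not an existing library function). It inspects the method signature and replaces any argument left as None with the same-named instance attribute, without modifying the object:

import functools
import inspect

def fallback_to_attrs(func):
    """Replace arguments left as None with same-named instance attributes."""
    sig = inspect.signature(func)

    @functools.wraps(func)
    def inner(self, *args, **kwargs):
        bound = sig.bind(self, *args, **kwargs)
        bound.apply_defaults()
        for name, value in bound.arguments.items():
            if value is None and hasattr(self, name):
                bound.arguments[name] = getattr(self, name)
        return func(*bound.args, **bound.kwargs)

    return inner

class A:
    def __init__(self, a=0, b=1):
        self.a = a
        self.b = b

    @fallback_to_attrs
    def func(self, a=None, b=None):
        print("Do something with {} and {}".format(a, b))

A().func(a=300)  # prints "Do something with 300 and 1"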
I'd like to pass a function that takes two arguments to a class, where one of the arguments is "predefined". When I call the function from within a class instance, I only want to supply the second argument (because I have already defined the first). Example:
def my_fun(a, b):
    return a + b

class MyClass():
    def __init__(self, fun):
        self._fun = fun

    def class_function(self, c):
        return self._fun(c)

instance = MyClass(my_fun(a=5.0))
print(instance.class_function(10.0))
Is this possible?
Use partial from the functools module.
from functools import partial

def my_fun(a, b):
    return a + b

class MyClass():
    def __init__(self, fun):
        self._fun = fun

    def class_function(self, c):
        return self._fun(c)

instance = MyClass(partial(my_fun, 5.0))
print(instance.class_function(10.0))
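A lambda that fixes the first argument does the same job, if you prefer to see the binding spelled out:

instance = MyClass(lambda b: my_fun(5.0, b))
print(instance.class_function(10.0))  # 15.0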
Currently I use the following technique:
def Myfb(param1, param2, firstTime):
    global a, b
    if firstTime:
        a = compute()
        b = compute2()
    c = doNormalExecution(param1, param2, a, b)
Is there a more elegant way? I don't like creating globals.
The technique is called memoization. The functools module has an lru_cache function that does what you want.
from functools import lru_cache

@lru_cache(maxsize=None)
def Myfb(param1, param2):
    a = compute()
    b = compute2()
    return doNormalExecution(param1, param2, a, b)
The Python docs have more information, such as what maxsize does and how lru_cache works, so that you can apply it suitably.
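For example, assuming compute(), compute2(), and doNormalExecution() from the question are defined:

print(Myfb(1, 2))  # first call with (1, 2): runs compute()/compute2() and caches the result
print(Myfb(1, 2))  # served from the cache; nothing is recomputed
print(Myfb(3, 4))  # new arguments: computed and cached separately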
You can use a generator:
def Myfb():
    a = compute()
    while True:
        param1, param2 = yield
        b = doNormalExecution(a, param1, param2)
        yield b
Here is a complete, runnable example:
def compute():
    return 10

def doNormalExecution(a, b, c):
    return a + b + c

def Myfb():
    a = compute()
    while True:
        param1, param2 = yield
        b = doNormalExecution(a, param1, param2)
        yield b

f = Myfb()
next(f)
for a, b in zip(range(10), range(10)):
    print(f.send((a, b)))
    next(f)
You can create a custom callable that will maintain its own state:
class MyFB(object):
    _sentinel = object()

    def __init__(self):
        self._a = self._sentinel
        self._b = self._sentinel

    def __call__(self, param1, param2, reinit=False):
        if reinit or self._a is self._sentinel or self._b is self._sentinel:
            self._a = compute_a()
            self._b = compute_b()
        return doNormalExecution(param1, param2, self._a, self._b)

myfb = MyFB()
# now use `myfb` like an ordinary function
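For instance, assuming compute_a(), compute_b(), and doNormalExecution() are defined:

result = myfb(1, 2)               # first call: computes and stores a and b
result = myfb(3, 4)               # reuses the stored a and b
result = myfb(5, 6, reinit=True)  # forces a and b to be recomputed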
Seeing as compute() and compute2() don't take arguments, you could use functools to cache their results. (Unless they have side effects.)
from functools import cache

@cache
def compute():
    # do complicated stuff the first time it is called
    return result

@cache
def compute2():
    # do complicated stuff the first time it is called
    return result

def Myfb(param1, param2):
    a = compute()
    b = compute2()
    c = doNormalExecution(param1, param2, a, b)
If you don't pass any parameters to a function, use this decorator (I had it lying around):
import functools

def lazy(func):
    """Decorator which only actually runs a function the first time it is
    called and stores the result to be returned on later calls.

    Usage:
        @lazy
        def func_to_be_only_run_once():
            ...
    """
    FLAG = object()
    result = FLAG

    @functools.wraps(func)
    def inner():
        nonlocal result
        if result is FLAG:
            result = func()
        return result

    return inner
If you have one or more arguments that change (including self), use functools.lru_cache.
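Applied to a no-argument function like the question's compute(), for example (a sketch; the printed message is only for illustration):

@lazy
def compute():
    print("expensive work runs only once")
    return 42

print(compute())  # runs the body and caches 42
print(compute())  # returns the cached 42 without re-running the body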
Here's a cool way to do it using closures.
def closure(firstTime=True):
    def Myfb():
        nonlocal firstTime
        if firstTime:
            print("First time.")
            firstTime = False
    return Myfb

myfb = closure()
myfb()
myfb()
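The same idea can also carry the expensive values themselves, so no globals are needed; a sketch assuming compute(), compute2(), and doNormalExecution() from the question:

def make_myfb():
    a = b = None

    def Myfb(param1, param2):
        nonlocal a, b
        if a is None:  # first call: compute once and remember
            a = compute()
            b = compute2()
        return doNormalExecution(param1, param2, a, b)

    return Myfb

myfb = make_myfb()
myfb(1, 2)  # computes a and b
myfb(3, 4)  # reuses them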
I'd like to be able to use a wrapper on a class method:
def wrapper(f):
    def inner(*args, **kwargs):
        print 'inner ' + f.__name__
        x = f(*args, **kwargs)
        return x
    return inner

class A:
    @wrapper
    def f1(x=55):
        return x

print A().f1()
This returns <__main__.A instance at 0x05FF7530>.
How can I return the result of the wrapped function A.f1()?
You have forgotten the self-argument:
class A:
    @wrapper
    def f1(self, x=55):
        return x
If you truly want to call it as stated in the question, make it a classmethod:
class A:
    @classmethod
    @wrapper
    def f1(self, x=55):
        return x
>>> A.f1()
inner f1
55
>>>
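As a side note, the wrapper itself can use functools.wraps so the decorated method keeps its original name and docstring; a sketch of the same wrapper written that way (not required for the fix above):

import functools

def wrapper(f):
    @functools.wraps(f)  # preserve f.__name__, __doc__, etc. on the wrapped function
    def inner(*args, **kwargs):
        print('inner ' + f.__name__)
        return f(*args, **kwargs)
    return inner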