How to dynamically add a reify property to a Python3 object

I'm trying to do some advanced wizardry and I don't completely understand the problem, which is no doubt why I'm having trouble solving it.
If I do something super hacky and let the child (DailyPic) know about its parent (Site) then I can make everything work, but I can't figure out how to do this dynamically just from Site.
I'm trying to dynamically add the fantastic reify (slightly modified) to DailyPic but have the DailyPic instance call a method on an instance of Site.
hacky but it works
class reify():
    def __init__(self, wrapped, name=None):
        self.wrapped = wrapped
        if name is None:
            from functools import update_wrapper
            update_wrapper(self, wrapped)
        else:
            self.wrapped.__name__ = name

    def __get__(self, inst, objtype=None):
        if inst is None:
            return self
        val = self.wrapped(inst)
        setattr(inst, self.wrapped.__name__, val)
        return val
class Site:
    def __init__(self):
        self.counter = 0

    def mk_filename(self, pic):
        self.counter += 1
        return f"{self.__class__.__name__}-{self.counter}.{pic.ext()}"

    def save(self):
        pic = DailyPic()
        from functools import partial
        pic._mk_filename = partial(self.mk_filename, pic)
        return pic
class DailyPic:
    def ext(self):
        return 'gif'  # simplified for example

    @reify
    def filename(self):
        return self._mk_filename()  # YUCK!!!

# works but ugly...
class Site1(Site): pass
class Site2(Site): pass

pic1 = Site1().save()
pic2 = Site2().save()
print(pic1.filename)  # Site1-1.gif
print(pic2.filename)  # Site2-1.gif
I'm trying to get to something like this:
import functools

class Site:
    def save(self):
        pic = DailyPic()
        DailyPic.filename = reify(
            functools.partial(lambda pic: self.mk_filename(pic)),
            'filename'
        )
        return pic

# sorta works...
print(pic1.filename)  # Site2-1.gif, should be Site1-1.gif
print(pic2.filename)  # Site2-2.gif, should be Site2-1.gif
If an advanced wizard could chime in that would be fantastic!

Unless you really, really need it to be a property, just ignore reify and properties and set a function on each instance that overwrites itself after it has run once.
class Site:
    def save(self):
        pic = DailyPic()
        def filename():
            val = self.mk_filename(pic)
            pic.filename = lambda: val  # overwrite itself so mk_filename only runs once
            return val
        pic.filename = filename
        return pic
Then it's just:
print(pic1.filename())
If you really want it to be a property, you could always set pic._filename = filename and have a property on DailyPic that returns self._filename(), which is basically what you were doing to begin with.
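Here is a minimal sketch of that property variant, combining it with the self-overwriting callable from above (the _filename name is just an example, not something from the question):
class DailyPic:
    def ext(self):
        return 'gif'

    @property
    def filename(self):
        # delegate to whatever callable the Site attached to this instance
        return self._filename()

class Site:
    def __init__(self):
        self.counter = 0

    def mk_filename(self, pic):
        self.counter += 1
        return f"{self.__class__.__name__}-{self.counter}.{pic.ext()}"

    def save(self):
        pic = DailyPic()
        def filename():
            val = self.mk_filename(pic)
            pic._filename = lambda: val  # overwrite so the counter only bumps once
            return val
        pic._filename = filename
        return pic

class Site1(Site): pass

pic1 = Site1().save()
print(pic1.filename)  # Site1-1.gif
print(pic1.filename)  # still Site1-1.gif, mk_filename not called again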

Related

completion/intellisense on (*args, **kwargs) functions

Is there a way to have completion/intellisense on (*args, **kwargs) functions?
For instance:
class GetVar(GetVarInterface):
    @classmethod
    def fromcustom(cls, locorvar, offset=0, varType="int", name=None, deref=False, member=None):
        return GetVarCustom(locorvar, offset, varType, name, deref, member)

class GetVarCustom(GetVar):
    def __init__(self, locorvar, offset=0, varType="int", name=None, deref=False, member=None):
        ...
I wanted to implement this without specifying every argument of the constructor (for example using *vars, **kwargs) but didn't want to lose completion/intellisense abilities. Is there a way?
The disadvantage of the current implementation is that you have to keep the two signatures in sync for every change...
The only option is to add a comment under the function to hint the arguments; otherwise you can't: if the IDE sees that a function takes unspecified arguments, it will show them as unspecified.
A "solution" is to spell out the common arguments and pass the rest as kwargs, or just keep the original __init__.
class Single_Init:
    def __init__(self, val_a, val_b, name=None):
        self.val_a = val_a
        self.val_b = val_b
        self.name = name

class Single_Init_B(Single_Init):
    # The parent constructor is called
    def get_result(self):
        return self.val_a + self.val_b

class Split_Const:
    def op_offset(self, offset):
        self.offset = offset

    def __init__(self, name, member=None, **kwargs):
        """You can also hint in a function comment"""
        self.name = name
        self.member = member
        if 'offset' in kwargs:
            self.offset = kwargs['offset']
        else:
            self.offset = None

if __name__ == '__main__':
    single = Single_Init_B(2, 3)
    print('Single:', single.get_result())
    split = Split_Const('Name')
    split.op_offset(0.5)
    print('Split:', split.offset)
Got the solution outside this site:
import functools

@functools.wraps(functools.partial(GetVarCustom.__init__, 1))
def f(*args, **kwargs):
    return GetVarCustom(*args, **kwargs)
Of course, it would have been easier with a standard function. However, you need to adjust the assigned argument of wraps; otherwise it will change the function name as well.
@functools.wraps(GetVarCustom.value, assigned=['__doc__'])
def getvalue(*args, **kwargs):
    return self_custom.value(*args, **kwargs)

Python: can I generically combine base class and derived class [duplicate]

Say I have 2 different implementations of a class
class ParentA:
    def initialize(self):
        pass
    def some_event(self):
        pass
    def order(self, value):
        # handle order in some way for Parent A
        pass

class ParentB:
    def initialize(self):
        pass
    def some_event(self):
        pass
    def order(self, value):
        # handle order in another way for Parent B
        pass
How can I dynamically let some 3rd class inherit from either ParentA or ParentB based on something like this?
class MyCode:
    def initialize(self):
        self.initial_value = 1

    def some_event(self):
        # handle event
        self.order(self.initial_value)

# let MyCode inherit from ParentA and run
run(my_code, ParentA)
Simply store the class object in a variable (in the example below, it is named base), and use the variable in the base-class list of your class statement.
def get_my_code(base):
    class MyCode(base):
        def initialize(self):
            ...
    return MyCode

my_code = get_my_code(ParentA)
Also, you can use the type builtin. As a callable, it takes the arguments name, bases, dct (in its simplest form).
def initialize(self):
    self.initial_value = 1

def some_event(self):
    # handle event
    self.order(self.initial_value)

subclass_body_dict = {
    "initialize": initialize,
    "some_event": some_event,
}

base_class = ParentA  # or ParentB, as you wish
MyCode = type("MyCode", (base_class,), subclass_body_dict)
This is more explicit than shx2's solution, but still - I like his way better.
PS. Of course, you don't have to store base_class or subclass_body_dict; you can build those values in the type() call like:
MyCode = type("MyCode", (ParentA,), {
    "initialize": initialize,
    "some_event": some_event,
})
Just as a quick copy-and-paste-ready snippet, I've taken the suggestion from the comments on shx2's answer and created this (memoized with a created_classes dict attribute, so that successive calls with the same bases return the identical class):
class ParentA:
    val = "ParentA"

class ParentB:
    val = "ParentB"

class DynamicClassCreator():
    def __init__(self):
        self.created_classes = {}

    def __call__(self, *bases):
        rep = ",".join([i.__name__ for i in bases])
        if rep in self.created_classes:
            return self.created_classes[rep]
        class MyCode(*bases):
            pass
        self.created_classes[rep] = MyCode
        return MyCode

creator = DynamicClassCreator()
instance1 = creator(ParentA, ParentB)()
print(instance1.val)  # prints "ParentA"
instance2 = creator(ParentB, ParentA)()
print(instance2.val)  # prints "ParentB"
If you wanted to get fancy you could even make DynamicClassCreator a Singleton: https://stackoverflow.com/a/7346105/5122790
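For instance, one way to sketch the singleton version (along the lines of the linked answer, not the only way to do it, and re-using ParentA and ParentB from the snippet above) is to cache the instance in __new__ and move the dict initialisation there, so repeated DynamicClassCreator() calls share one cache:
class DynamicClassCreator():
    _instance = None

    def __new__(cls):
        # reuse a single shared instance, creating it on first use
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance.created_classes = {}
        return cls._instance

    def __call__(self, *bases):
        rep = ",".join([i.__name__ for i in bases])
        if rep not in self.created_classes:
            class MyCode(*bases):
                pass
            self.created_classes[rep] = MyCode
        return self.created_classes[rep]

creator_a = DynamicClassCreator()
creator_b = DynamicClassCreator()
print(creator_a is creator_b)  # True
print(creator_a(ParentA, ParentB) is creator_b(ParentA, ParentB))  # True, same cached class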
As an alternative to Chris's answer implementing the memoisation suggestion for shx2's answer, I'd prefer to use a memoize decorator (the end result is still a class, but it's clearer to me that the function is the interface). I also use setdefault to simplify adding to the memo dict, and don't convert the names to a string but use the tuple of bases itself as the key, simplifying the code to:
class Memoize:
    def __init__(self, f):
        self.f = f
        self.memo = {}

    def __call__(self, *args):
        return self.memo.setdefault(args, self.f(*args))

class ParentA:
    def initialize(self):
        pass

class ParentB:
    def initialize(self):
        pass

@Memoize
def get_my_code(base):
    class MyCode(base):
        def initialize(self):
            pass
    return MyCode

a1 = get_my_code(ParentA)
a2 = get_my_code(ParentA)
b1 = get_my_code(ParentB)
print(a1 is a2)  # True
print(a1 is b1)  # False
(Not a good example as the code provided doesn't actually do anything other than overwrite the parent class's initialize method...)

Why can I not create an object like this in Python?

A question about objects in Python. I have created the following class:
import requests

class http(object):
    def __init__(self):
        self._resource = None
        self._response = None

    @property
    def resource(self):
        return self._resource

    @resource.setter
    def resource(self, value):
        self._resource = "http://127.0.0.1:8000/%s" % value

    def get(self, resource=None):
        self.resource = resource
        self._response = requests.get(self.resource)
        return self._response
__init__ does not need anything at this stage, so I was hoping I could create the object like this:
content = http.get("users/")
but it won't let me do this; instead I have to use the syntax http() and pass nothing:
content = http().get("users/")
which seems silly if I don't pass anything to __init__. I'm wondering how a Python package like requests achieves the following syntax:
requests.get('https://api.github.com/user')
without doing this...
requests().get('https://api.github.com/user')
Why? What does the requests package do differently?
The requests package defines module-level functions that create and use an instance behind the scenes (requests.get ultimately calls requests.request). You can do the same thing for your http class.
class http(object):
    def get(self, resource=None):
        self.resource = resource
        self._response = requests.get(self.resource)
        return self._response

def get(resource=None):
    temp_instance = http()
    return temp_instance.get(resource)
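With that module-level wrapper in place, callers never create the instance themselves; a quick sketch of the resulting call site (assuming the URL handling from the original class):
content = get("users/")  # equivalent to content = http().get("users/"), mirroring requests.get(...)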

How to access a class method from a property definition

I have a model where I want to use a class method to set the default for a property:
class Organisation(db.Model):
    name = db.StringProperty()
    code = db.StringProperty(default=generate_code())

    @classmethod
    def generate_code(cls):
        import random
        codeChars = 'ABCDEF0123456789'
        while True:  # Make sure code is unique
            code = random.choice(codeChars) + random.choice(codeChars) + \
                   random.choice(codeChars) + random.choice(codeChars)
            if not cls.all().filter('code = ', code).get(keys_only=True):
                return code
But I get a NameError:
NameError: name 'generate_code' is not defined
How can I access generate_code()?
As I said in a comment, I would use a classmethod to act as a factory and always create your entity through there. It keeps things simpler, with no nasty hooks needed to get the behaviour you want.
Here is a quick example.
class Organisation(db.Model):
    name = db.StringProperty()
    code = db.StringProperty()

    @classmethod
    def generate_code(cls):
        import random
        codeChars = 'ABCDEF0123456789'
        while True:  # Make sure code is unique
            code = random.choice(codeChars) + random.choice(codeChars) + \
                   random.choice(codeChars) + random.choice(codeChars)
            if not cls.all().filter('code = ', code).get(keys_only=True):
                return code

    @classmethod
    def make_organisation(cls, *args, **kwargs):
        new_org = cls(*args, **kwargs)
        new_org.code = cls.generate_code()
        return new_org
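Used that way, callers go through the factory and never set code by hand; a hypothetical call site might look like this (field name taken from the model above, GAE db API assumed):
org = Organisation.make_organisation(name='Example Org')
org.put()  # code was already filled in by generate_code()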
import random

class Test(object):
    def __new__(cls):
        cls.my_attr = cls.get_code()
        return super(Test, cls).__new__(cls)

    @classmethod
    def get_code(cls):
        return random.randrange(10)

t = Test()
print t.my_attr
You need to specify the class name: Organisation.generate_code()
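In other words, qualify the call with the class when you invoke it from outside the class body; a hypothetical example (GAE db API assumed, as above):
org = Organisation(name='Example Org')
org.code = Organisation.generate_code()  # qualified with the class name
org.put()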

Dynamic/runtime method creation (code generation) in Python

I need to generate code for a method at runtime. It's important to be able to run arbitrary code and have a docstring.
I came up with a solution combining exec and setattr, here's a dummy example:
class Viking(object):
    def __init__(self):
        code = '''
def dynamo(self, arg):
    """ dynamo's a dynamic method!
    """
    self.weight += 1
    return arg * self.weight
'''
        self.weight = 50
        d = {}
        exec code.strip() in d
        setattr(self.__class__, 'dynamo', d['dynamo'])

if __name__ == "__main__":
    v = Viking()
    print v.dynamo(10)
    print v.dynamo(10)
    print v.dynamo.__doc__
Is there a better / safer / more idiomatic way of achieving the same result?
Based on Theran's code, but extending it to methods on classes:
class Dynamo(object):
    pass

def add_dynamo(cls, i):
    def innerdynamo(self):
        print "in dynamo %d" % i
    innerdynamo.__doc__ = "docstring for dynamo%d" % i
    innerdynamo.__name__ = "dynamo%d" % i
    setattr(cls, innerdynamo.__name__, innerdynamo)

for i in range(2):
    add_dynamo(Dynamo, i)

d = Dynamo()
d.dynamo0()
d.dynamo1()
Which should print:
in dynamo 0
in dynamo 1
Function docstrings and names are mutable properties. You can do anything you want in the inner function, or even have multiple versions of the inner function that makedynamo() chooses between. No need to build any code out of strings.
Here's a snippet out of the interpreter:
>>> def makedynamo(i):
...     def innerdynamo():
...         print "in dynamo %d" % i
...     innerdynamo.__doc__ = "docstring for dynamo%d" % i
...     innerdynamo.__name__ = "dynamo%d" % i
...     return innerdynamo
>>> dynamo10 = makedynamo(10)
>>> help(dynamo10)
Help on function dynamo10 in module __main__:

dynamo10()
    docstring for dynamo10
Python will let you declare a function in a function, so you don't have to do the exec trickery.
def __init__(self):
    def dynamo(self, arg):
        """ dynamo's a dynamic method!
        """
        self.weight += 1
        return arg * self.weight

    self.weight = 50
    setattr(self.__class__, 'dynamo', dynamo)
If you want to have several versions of the function, you can put all of this in a loop and vary what you name them in the setattr function:
def __init__(self):
    for i in range(0, 10):
        def dynamo(self, arg, i=i):
            """ dynamo's a dynamic method!
            """
            self.weight += i
            return arg * self.weight
        setattr(self.__class__, 'dynamo_' + str(i), dynamo)
    self.weight = 50
(I know this isn't great code, but it gets the point across). As far as setting the docstring, I know that's possible but I'd have to look it up in the documentation.
Edit: You can set the docstring via dynamo.__doc__, so you could do something like this in your loop body:
dynamo.__doc__ = "Adds %s to the weight" % i
Another Edit: With help from @eliben and @bobince, the closure problem should be solved.
class Dynamo(object):
    def __init__(self):
        pass

    @staticmethod
    def init(initData=None):
        if initData is not None:
            dynamo = Dynamo()
            for name, value in initData.items():
                code = '''
def %s(self, *args, **kwargs):
    %s
''' % (name, value)
                result = {}
                exec code.strip() in result
                setattr(dynamo.__class__, name, result[name])
            return dynamo
        return None

service = Dynamo.init({'fnc1': 'pass'})
service.fnc1()
A bit more general solution:
You can call any method of an instance of class Dummy.
The docstring is generated based on the method's name.
The handling of any input arguments is demonstrated by just returning them.
Code
class Dummy(object):
    def _mirror(self, method, *args, **kwargs):
        """doc _mirror"""
        return args, kwargs

    def __getattr__(self, method):
        "doc __getattr__"
        def tmp(*args, **kwargs):
            """doc tmp"""
            return self._mirror(method, *args, **kwargs)
        tmp.__doc__ = (
            'generated docstring, access by {:}.__doc__'
            .format(method))
        return tmp

d = Dummy()
print(d.test2('asd', level=0), d.test.__doc__)
print(d.whatever_method(7, 99, par=None), d.whatever_method.__doc__)
Output
(('asd',), {'level': 0}) generated docstring, access by test.__doc__
((7, 99), {'par': None}) generated docstring, access by whatever_method.__doc__
Pardon me for my bad English.
I recently needed to generate dynamic functions to bind each menu item to open a particular frame in wxPython. Here is what I do.
First, I create a list of mappings between the menu items and the frames.
menus = [(self.menuItemFile, FileFrame), (self.menuItemEdit, EditFrame)]
The first item in each mapping is the menu item and the last item is the frame to be opened. Next, I bind the wx.EVT_MENU event of each menu item to its frame.
for menu in menus:
    f = genfunc(self, menu[1])
    self.Bind(wx.EVT_MENU, f, menu[0])
The genfunc function is the dynamic function builder; here is the code:
def genfunc(parent, form):
    def OnClick(event):
        f = form(parent)
        f.Maximize()
        f.Show()
    return OnClick
