Python: how to dynamically set function closure environment - python

I want to declare a function dynamically and I want to wrap any access to global variables OR alternatively define which variables are free and wrap any access to free variables.
I'm playing around with code like this:
# Dict-like wrapper that logs every item and attribute access; used below
# as the locals mapping for ``exec`` so name traffic can be observed.
# (Python 2 print-statement syntax.)
class D:
    def __init__(self):
        # Backing storage for the mapping.
        self.d = {}

    def __getitem__(self, k):
        # Log reads before delegating to the real dict.
        print "D get", k
        return self.d[k]

    def __setitem__(self, k, v):
        # Log writes before delegating to the real dict.
        print "D set", k, v
        self.d[k] = v

    def __getattr__(self, k):
        # Called only when normal attribute lookup fails: log and signal
        # "no such attribute" so exec's machinery falls back cleanly.
        print "D attr", k
        raise AttributeError
# Compile a one-line function and exec it with an empty dict as globals and
# the logging wrapper as locals, so the name ``foo`` lands in globalsDict.
globalsDict = D()
src = "def foo(): print x"
compiled = compile(src, "<foo>", "exec")
exec compiled in {}, globalsDict  # Python 2 exec-statement form
f = globalsDict["foo"]
print(f)
# Fails: ``x`` is resolved through foo.__globals__ (the empty dict above),
# not through the D instance used as locals.
f()
This produces the output:
D set foo <function foo at 0x10f47b758>
D get foo
<function foo at 0x10f47b758>
Traceback (most recent call last):
File "test_eval.py", line 40, in <module>
f()
File "<foo>", line 1, in foo
NameError: global name 'x' is not defined
What I want is somehow catch the access to x with my dict-like wrapper D. How can I do that?
I don't want to predefine all global variables (in this case x) because I want to be able to load them lazily.

What you're looking for is object proxying.
Here is a recipe for an object proxy which supports pre- and post- call hooks:
http://code.activestate.com/recipes/366254-generic-proxy-object-with-beforeafter-method-hooks/
Create a subclass that doesn't actually load the object until the first time the _pre hook is called. Anything accessing the object will cause the real object to be loaded, and all calls will appear to be handled directly by the real object.

Try this out
class GlobalDict(object):
    """Mapping that mirrors its contents into the ``__globals__`` of every
    function stored in it, so names added later become visible inside those
    functions. (Python 2 print-statement syntax.)"""

    def __init__(self, **kwargs):
        # Backing storage for the mapping.
        self.d = kwargs

    def __getitem__(self, key):
        print 'getting', key
        return self.d[key]

    def __setitem__(self, key, value):
        print 'setting', key, 'to', value
        # If the incoming value is a function, seed its globals with
        # everything already stored here so earlier entries are visible.
        if hasattr(value, '__globals__'):
            value.__globals__.update(self.d)
        self.d[key] = value
        # Propagate the new entry into the globals of every *other*
        # function previously stored in this mapping.
        for v in self.d.values():
            if v is not value:
                if hasattr(v, '__globals__'):
                    v.__globals__.update(self.d)

    def __delitem__(self, key):
        print 'deling', key
        del self.d[key]
        # Also remove the name from the globals of every stored function.
        # NOTE(review): this raises KeyError if some stored function never
        # had ``key`` pushed into its globals — confirm before reuse.
        for v in self.d.values():
            if hasattr(v, '__globals__'):
                del v.__globals__[key]
>>> gd = GlobalDict()
>>> src = 'def foo(): print x'
>>> compiled = compile(src, '<foo>', 'exec')
>>> exec compiled in {}, gd
setting foo to <function foo at 0x102223b18>
>>> f = gd['foo']
getting foo
>>> f
<function foo at 0x102223b18>
>>> f() # This one will throw an error
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "<foo>", line 1, in foo
NameError: global name 'x' is not defined
>>> gd['x'] = 1
setting x to 1
>>> f()
1
>>> del gd['x'] # removes 'x' from the globals of anything in gd
>>> f() # Will now fail again
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "<foo>", line 1, in foo
NameError: global name 'x' is not defined

Related

Odd behaviour within __new__ method of Python metaclass [duplicate]

This question already has answers here:
Local variables in nested functions
(4 answers)
Closed 7 years ago.
I'm experiencing odd behaviour within the __new__ method of a Python metaclass. I know the following code works fine:
def create_property(name, _type):
    """Build a property that stores ``name`` in the instance ``__dict__``
    and rejects any assigned value that is not an instance of ``_type``.

    Because ``name`` and ``_type`` are parameters of this factory, each
    returned property closes over its own pair of values.
    """
    def read(self):
        # A never-assigned attribute reads as None rather than raising.
        return self.__dict__.get(name)

    def write(self, val):
        # Guard clause: refuse values of the wrong type up front.
        if not isinstance(val, _type):
            raise ValueError("Type not correct.")
        self.__dict__[name] = val

    return property(read, write)
class Meta(type):
    """Metaclass that converts class attributes holding a type (for example
    ``name = str``) into type-checked properties via ``create_property``."""

    def __new__(cls, clsname, bases, clsdict):
        # Snapshot the items so entries can be overwritten while looping.
        for key, val in list(clsdict.items()):
            if isinstance(val, type):
                clsdict[key] = create_property(key, val)
        return super().__new__(cls, clsname, bases, clsdict)
But when I avoid defining the create_property function separately and instead put its code inside the for loop within __new__, weird stuff happens. The following is the modified code:
# Deliberately problematic variant from the question: the getter/setter
# closures capture the loop variables ``attr`` and ``data_type`` by
# reference, not by value.  They are looked up when the property is *used*,
# long after the loop has finished, so every property sees the values from
# the final iteration (late binding) — hence the mixed-up errors below.
class Meta(type):
    def __new__(meta, name, bases, clsdict):
        for attr, data_type in clsdict.items():
            if not attr.startswith("_"):
                def getter(self):
                    # ``attr`` here is whatever the loop variable holds at
                    # call time, not at definition time.
                    return self.__dict__[attr]
                def setter(self, val):
                    # Same late-binding problem for ``data_type``.
                    if isinstance(val, data_type):
                        self.__dict__[attr] = val
                    else:
                        raise ValueError(
                            "Attribute '" + attr + "' must be " + str(data_type) + ".")
                clsdict[attr] = property(getter, setter)
        return super().__new__(meta, name, bases, clsdict)
The idea is being able to create classes that behave like forms, i.e:
# Example usage: each class attribute declares the expected type of a field.
class Company(metaclass=Meta):
    name = str
    stock_value = float
    employees = list

if __name__ == '__main__':
    c = Company()
    # With the buggy metaclass variant these assignments fail unpredictably,
    # because every property validates against the last-seen attr/type pair.
    c.name = 'Apple'
    c.stock_value = 125.78
    c.employees = ['Tim Cook', 'Kevin Lynch']
    print(c.name, c.stock_value, c.employees, sep=', ')
When executed, different errors start to happen, such as:
Traceback (most recent call last):
File "main.py", line 37, in <module>
c.name = 'Apple'
File "main.py", line 13, in setter
if isinstance(val, data_type):
TypeError: isinstance() arg 2 must be a type or tuple of types
Traceback (most recent call last):
File "main.py", line 38, in <module>
c.stock_value = 125.78
File "main.py", line 17, in setter
"Attribute '" + attr + "' must be " + str(data_type) + ".")
ValueError: Attribute 'name' must be <class 'str'>.
Traceback (most recent call last):
File "main.py", line 37, in <module>
c.name = 'Apple'
File "main.py", line 17, in setter
"Attribute '" + attr + "' must be " + str(data_type) + ".")
ValueError: Attribute 'stock_value' must be <class 'float'>.
Traceback (most recent call last):
File "main.py", line 37, in <module>
c.name = 'Apple'
File "main.py", line 17, in setter
"Attribute '" + attr + "' must be " + str(data_type) + ".")
ValueError: Attribute 'employees' must be <class 'list'>.
So, what is going on here? What is the difference between having the create_property defined separately than within the __new__ method?
That's due to how the scoping and variable binding works in python. You define a function in a loop which accesses a local variable; but this local variable is looked up during execution of the function, not bound during its definition:
# Demonstration of late binding: each ``f`` reads the *current* value of
# ``x`` when it is called, so after the loop every function sees x == 9.
# (Python 2 print-statement syntax.)
fcts = []
for x in range(10):
    def f(): print x
    fcts.append(f)
for f in fcts: f() #prints '9' 10 times, as x is 9 after the loop
As you've discovered, you can simply create a closure over the current loop value by using a utility function:
# Fix 1: a factory function creates a fresh scope per call, so each inner
# ``f`` closes over its own copy of ``x``.
fcts = []
def make_f(x):
    def f(): print x
    return f
for x in range(10):
    fcts.append(make_f(x))
for f in fcts: f() #prints '0' to '9'
Another possibility is to (ab)use a default argument, as those are assigned during function creation:
# Fix 2: default argument values are evaluated at function *definition*
# time, so ``n`` snapshots the loop's current ``x`` on every iteration.
fcts = []
for x in range(10):
    def f(n=x): print n
    fcts.append(f)
for f in fcts: f() #prints '0' to '9'

Setting an accessible Python function's variable from outside a function

I am curious how I can assign a variable from outside a function object. Before I tried it, I thought I knew how it can be done.
>>> def f():
... print(x)
...
>>> f.x=2
>>> f()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 2, in f
NameError: name 'x' is not defined
>>>
I then tried:
>>> class c:
... def f(self):
... print(x)
...
>>> y=c();y.x=2;y.f()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 3, in f
NameError: name 'x' is not defined
The same error. Now, I thought, this just has to work:
>>> class c:
... def makef(self):
... return lambda x=x: print(x)
...
>>> y = c();y.x = 2;y.makef()()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 3, in makef
NameError: name 'x' is not defined
Alas, it did not. How can I assign a variable accessible to a function after the function has been defined? This is just a curiosity. There's really no reason (that I can think of) for not just passing a parameter.
# Store the value on an object and read it through ``self`` instead of
# relying on a global name. (Python 2 print-statement syntax.)
class Name:
    def __init__(self):
        # Placeholder until a caller assigns a real value.
        self.x = None
    def f(self):
        print self.x

a = Name()
a.x = 'Value'
a.f()
output
$ Value
I discovered a way of doing what I was trying to accomplish. I need to modify the object's dictionary:
>>> def f():
... print(x)
...
>>> f.__dict__['x'] = 2
>>> f()
2
Basically if you define your variable in the main program, you can then use the global keyword to reference it.
# ``global`` makes the assignment inside the function rebind the
# module-level name instead of creating a new local variable.
bah = 1
def function():
    global bah
    bah = 2

Generate methods dynamically giving the keys of a dictionary

Looking to find a solution(not sure if it exists!) to the following situation:
Starting point is a dictionary dict = {k1:v1, k2:v2,...,kn:vn} where n is not fixed.
Is there a way to write a generic class that will have n methods generated dynamically that can be called as in the following example:
class example(dict):
example.k1()
example.k2()
.
.
.
example.kn()
Each example.ki(), where 1 <= i <= n, should return the corresponding vi.
Instead of creating so many method dynamically better override __getattr__ method of your class and return a callable from there:
class Example(dict):
    """Dict whose keys are callable as zero-argument methods: d.a() == d['a']."""

    def __getattr__(self, k):
        # Only reached when normal attribute lookup fails, i.e. for keys.
        if k not in self:
            # NOTE(review): AttributeError would be more conventional here
            # (TypeError breaks hasattr()); kept as-is to preserve behavior.
            raise TypeError('Example object has not attribute {!r}'.format(k))
        def fetch():
            return self[k]
        return fetch
Note that for keys like keys(), items(), etc __getattr__ won't be called as they are found in the class by __getattribute__ itself. And better don't name any of your keys after them.
Demo:
>>> d = Example(a=1, b=2, c=3)
>>> d.a()
1
>>> d.b()
2
>>> d.foo()
Traceback (most recent call last):
File "<pyshell#14>", line 1, in <module>
d.foo()
File "/home/ashwini/py/so.py", line 7, in __getattr__
raise TypeError('Example object has not attribute {!r}'.format(k))
TypeError: Example object has not attribute 'foo'
What you want is to override the __getattr__ function described here.
To take your example:
class example(dict):
    """Expose every stored key as a zero-argument method returning its value."""

    def __getattr__(self, name):
        # Defer the lookup until the returned callable is invoked, so a
        # missing key raises KeyError at call time, not at attribute access.
        def deferred():
            return self[name]
        return deferred
This allows you to do:
e = example()
e["foo"] = 1
print e.foo()
==> 1
I think adding a method to a class dynamically can help you.
class example(object):
    # Class-level lookup table shared by every instance.
    # NOTE(review): the attribute name shadows the builtin ``dict``; kept
    # to preserve the published interface.
    dict = {'k1': 'v1', 'k2': 'v2', 'k3': 'v3', 'kn': 'vn'}

    def getvalue(self, key):
        """Return the value stored under ``key`` in the shared mapping."""
        return self.dict[key]
# Demonstration script (Python 2 print-statement syntax): binding a bound
# method to a new attribute name "adds" a method at runtime — every
# ``methodN`` below is simply an alias for ``e.getvalue``.
if __name__=="__main__" :
    e = example()
    e.method1=e.getvalue # this is adding a method to example class dynamically.
    print e.method1('k1')
    e.method2=e.getvalue
    print e.method2('k2')
    e.method3=e.getvalue
    print e.method3('k3')
    e.methodn=e.getvalue
    print e.methodn('kn')
this outputs
v1
v2
v3
vn

Change what dictionary serves as a function's global scope

I want to make a @pure decorator for Python; part of this is being able to selectively disallow access to the global scope of the function.
Is there a way to programmatically change which dictionary serves as a function's global/external scope?
So for instance in the following I want to be able to intercept the access to f in h and throw an error, but I want to allow access to g because it's a pure function.
# Impure function the hypothetical @pure decorator should block access to.
def f():
    print("Non-pure function")

#pure  (stands for the intended @pure decorator from the question)
def g(i):
    return i + 1

#pure
def h(i):
    f()  # impure call that should be intercepted and rejected
    return g(i)  # pure call that should remain allowed
You would have to create a new function object from the old one:
newfunc = type(h)(h.__code__, cleaned_globals, h.__name__, h.__defaults__, h.__closure__)
Here, cleaned_globals is a dictionary that is to be used as the global namespace for the newly created function object. All other arguments echo the original function's.
cleaned_globals could be based on a copy of h.__globals__, of course.
Demo:
>>> def h(i):
... f()
... return g(i)
...
>>> def g(i):
... return i + 1
...
>>> def f():
... print("Non-pure function")
...
>>> h(1)
Non-pure function
2
>>> cleaned_globals = {'g': g}
>>> newfunc = type(h)(h.__code__, cleaned_globals, h.__name__, h.__defaults__, h.__closure__)
>>> newfunc(1)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 2, in h
NameError: global name 'f' is not defined
>>> cleaned_globals['f'] = lambda: print('Injected function')
>>> newfunc(1)
Injected function
2

Why is getattr() so much slower than self.__dict__.get()?

The example below is from a REST database driver on Python 2.7.
In the __setattr__ method below, if I use the commented out getattr() line, it reduces the object instantiation performance from 600 rps to 230.
Why is getattr() so much slower than self.__dict__.get() in this case?
class Element(object):
    """Routes attribute writes into ``self._data`` once construction is done.

    While ``__init__`` runs (before ``_initialized`` exists) attributes are
    stored normally; afterwards only pre-existing instance attributes are
    written directly and every new name becomes a data property.
    """

    def __init__(self, client):
        self._client = client
        self._data = {}
        # Flag flip: from here on __setattr__ diverts new names into _data.
        self._initialized = True

    def __setattr__(self, key, value):
        # A plain dict lookup is cheaper than getattr() here: it skips the
        # full attribute-resolution machinery (descriptors, MRO walk,
        # __getattr__ fallback).
        ready = self.__dict__.get("_initialized", False)
        if ready is False or key in self.__dict__:
            # Set the attribute normally.
            object.__setattr__(self, key, value)
        else:
            # Set the attribute as a data property.
            self._data[key] = value
In short: because getattr(foo,bar) does the same thing as foo.bar, which is not the same thing as just accessing the __dict__ property (for a start, getattr has to select the right __dict__, but there's a whole lot more going on).
An example for illustration:
>>> class A:
... a = 1
...
>>> class B(A):
... b = 2
...
>>> dir(B)
['__doc__', '__module__', 'a', 'b']
>>> B.a
1
>>> B.__dict__
{'__module__': '__main__', 'b': 2, '__doc__': None}
>>> B.__dict__['a']
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
KeyError: 'a'
>>> B.__dict__.get('a')
>>>
Details contained in, or linked to here: http://docs.python.org/reference/datamodel.html (search for "getattr").

Categories