How can I make the inspect package alter a caller's locals? - python

I am trying to write a save/load command like the one in MATLAB (the ability to save local variables to disk or load them back into the current context, or "workspace" in MATLAB's terminology).
I wrote the following code, but it doesn't seem to work: the variables in the outer scope are not replaced, probably because a memory copy takes place somewhere.
Here is the code:
import shelve
import logging
import inspect

logger = logging.getLogger()

def save_locals(filename, keys=None):
    my_shelf = shelve.open(filename, 'n')  # 'n' for new
    caller_locals = inspect.stack()[1][0].f_locals
    if keys is None:
        keys = caller_locals.keys()
    for key in keys:
        try:
            my_shelf[key] = caller_locals[key]
        except TypeError:
            #
            # __builtins__, my_shelf, and imported modules can not be shelved.
            #
            print('ERROR shelving: {0}'.format(key))
    my_shelf.close()
def load_locals(filename, keys=None):
    my_shelf = shelve.open(filename)
    caller_locals = inspect.stack()[1][0].f_locals
    if keys is None:
        keys = list(my_shelf.keys())
    for key in keys:
        try:
            caller_locals[key] = my_shelf[key]
        except ValueError:
            print('cannot get variable {0}'.format(key))
Here is the test which fails:
from unittest import TestCase
from .io import save_locals, load_locals

class TestIo(TestCase):
    def test_save_load(self):
        sanity = 'sanity'
        an_int = 3
        a_float = 3.14
        a_list = [1, 2, 3]
        a_dict = [{'a': 5, 'b': 3}]
        save_locals('temp')
        an_int = None
        a_float = None
        a_list = None
        a_dict = None
        load_locals('temp')
        self.assertIn('an_int', locals())
        self.assertIn('a_float', locals())
        self.assertIn('a_list', locals())
        self.assertIn('a_dict', locals())
        self.assertEqual(an_int, 3)
        self.assertEqual(a_float, 3.14)
        self.assertEqual(a_list, [1, 2, 3])
        self.assertEqual(a_dict, [{'a': 5, 'b': 3}])
When I set a breakpoint inside load_locals I can see that it changes the f_locals dictionary, but when the function returns the caller's variables do not change.

No, you can't update local variables on the fly. The reason is that the local symbol table is stored as a C array for optimization, and both locals() and frame.f_locals return a copy of that local symbol table. The official position is that modifying locals() has undefined behavior. This thread talks a bit about it.
It ends up being extra weird because calling locals() or frame.f_locals returns the same dictionary each time, which gets re-synced at various points. Here, merely calling frame.f_locals again resets the local:
def test_locals():
    frame = inspect.stack()[1][0]
    caller_locals = frame.f_locals
    caller_locals['an_int'] = 5
    print(caller_locals)
    _ = frame.f_locals
    print(caller_locals)

def call_test_locals():
    an_int = 3
    test_locals()

call_test_locals()
output:
{'an_int': 5}
{'an_int': 3}
The behavior is going to depend on the Python implementation and probably other edge cases, but here are a few examples: (1) the variable is already defined in the caller and is not updated; (2) the variable is not defined and is updated; (3) the variable is defined and subsequently deleted and is not updated.
def test_locals():
    frame = inspect.stack()[1][0]
    caller_locals = frame.f_locals
    caller_locals['an_int'] = 5

def call_test_locals1():
    an_int = 3
    print('calling', locals())
    test_locals()
    print('done', locals())

def call_test_locals2():
    print('calling', locals())
    test_locals()
    print('done', locals())

def call_test_locals3():
    an_int = 3
    del an_int
    print('calling', locals())
    test_locals()
    print('done', locals())

print('\n1:')
call_test_locals1()
print('\n2:')
call_test_locals2()
print('\n3:')
call_test_locals3()
output:
1:
calling {'an_int': 3}
done {'an_int': 3}
2:
calling {}
done {'an_int': 5}
3:
calling {}
done {}
If you're running Python 2, you could use exec to execute a string into the local namespace, but it won't work in Python 3 and is in general probably a bad idea.
import shelve
import logging
import inspect

logger = logging.getLogger()

def save_locals(filename, keys=None):
    my_shelf = shelve.open(filename, 'n')  # 'n' for new
    caller_locals = inspect.stack()[1][0].f_locals
    if keys is None:
        keys = caller_locals.keys()
    for key in keys:
        try:
            my_shelf[key] = caller_locals[key]
        except TypeError:
            #
            # __builtins__, my_shelf, and imported modules can not be shelved.
            #
            print('ERROR shelving: {0}'.format(key))
    my_shelf.close()

def load_locals_string(filename, keys=None):
    my_shelf = shelve.open(filename)
    if keys is None:
        keys = list(my_shelf.keys())
    return ';'.join('{}={!r}'.format(key, my_shelf[key]) for key in keys)
and
from unittest import TestCase
from .io import save_locals, load_locals_string

class TestIo(TestCase):
    def test_save_load(self):
        sanity = 'sanity'
        an_int = 3
        a_float = 3.14
        a_list = [1, 2, 3]
        a_dict = [{'a': 5, 'b': 3}]
        save_locals('temp')
        an_int = None
        a_float = None
        a_list = None
        a_dict = None
        exec load_locals_string('temp')
        self.assertIn('an_int', locals())
        self.assertIn('a_float', locals())
        self.assertIn('a_list', locals())
        self.assertIn('a_dict', locals())
        self.assertEqual(an_int, 3)
        self.assertEqual(a_float, 3.14)
        self.assertEqual(a_list, [1, 2, 3])
        self.assertEqual(a_dict, [{'a': 5, 'b': 3}])
In Python 2, exec uses PyFrame_LocalsToFast to copy the variables back into the local scope, but it can't in Python 3, where exec is a function rather than a statement. Martijn Pieters has a good post about it.
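For completeness: on CPython specifically there is a well-known but unsupported workaround that writes f_locals back into the frame's fast-locals storage by calling the private C function PyFrame_LocalsToFast through ctypes. The following is only a sketch, assuming CPython no newer than 3.12 (PEP 667 reworks frame locals in 3.13), and it can only update names that already exist as locals in the caller:
import ctypes
import inspect
import shelve

def load_locals_cpython(filename, keys=None):
    # CPython-only sketch: mutate the caller's f_locals snapshot once,
    # then ask the interpreter to copy that dict back into the frame's
    # optimized (fast) locals. Names that are not already locals in the
    # caller are not created.
    frame = inspect.stack()[1][0]
    my_shelf = shelve.open(filename)
    try:
        caller_locals = frame.f_locals  # grab the snapshot exactly once
        if keys is None:
            keys = list(my_shelf.keys())
        for key in keys:
            caller_locals[key] = my_shelf[key]
        # Private C API: PyFrame_LocalsToFast(frame, clear=0).
        ctypes.pythonapi.PyFrame_LocalsToFast(ctypes.py_object(frame), ctypes.c_int(0))
    finally:
        my_shelf.close()
        del frame
Because this leans on a private API it can break between interpreter versions; returning a dict from the loader (or the exec-string approach above) is the safer design.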

Related

How to temporarily override a member of a dictionary?

There are times when it's necessary to override a dictionary member that might already exist, execute arbitrary code (such as a callback, which could fail), then set the value back to its previous state (which includes not being present at all).
Keeping a copy of the dictionary isn't an option since this dictionary might have other members modified by the callback (which I want to keep).
How should a dictionary item be overridden temporarily?
Here is a very straight-forward implementation:
import contextlib

@contextlib.contextmanager
def temp_item(dictionary, key, value):
    empty = object()
    original = dictionary.get(key, empty)
    dictionary[key] = value
    try:
        yield dictionary
    finally:
        if original is empty:
            dictionary.pop(key, None)
        else:
            dictionary[key] = original
This can be used as follows:
d = {'a': 1, 'b': 2}
with temp_item(d, 'c', '3') as d:
    d['d'] = 4
    d['a'] = 10
    print(d['c'])
print(d)
Which outputs:
3
{'a': 10, 'b': 2, 'd': 4}
unittest.mock provides patch.dict.
It can patch not only dictionaries, but also objects that behave like them, and it can clear out the mock with or without mocking its contents.
However, it's probably best to rely on unittest.mock only in a test context.
import unittest
from unittest.mock import patch

class TestWhatever(unittest.TestCase):
    def test_dictionary_mocking(self):
        with patch.dict("os.environ", {"RUNNING_AS_DOCKER": "true"}):
            self.assertTrue(detect_docker())
        with patch.dict("os.environ", clear=True):
            self.assertFalse(detect_docker())
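As a side note, patch.dict also works as a function or class decorator rather than a context manager. A small sketch, reusing the hypothetical detect_docker() helper from the example above:
import unittest
from unittest.mock import patch

class TestWhateverDecorated(unittest.TestCase):
    @patch.dict("os.environ", {"RUNNING_AS_DOCKER": "true"})
    def test_dictionary_mocking_decorated(self):
        # The environment entry is patched for the duration of this test only.
        self.assertTrue(detect_docker())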
This can be done inline using a try/finally block.
# Set to any object you know the dictionary won't use as a value.
sentinel = object()

value_orig = mydict.get(key, sentinel)
mydict[key] = value_new
try:
    run_callback()
finally:
    if value_orig is sentinel:
        # Use pop in case the callback added this key.
        mydict.pop(key, None)
    else:
        mydict[key] = value_orig
Wrapped into a context manager that takes a dictionary as an argument (instead of a single key: value pair, for added flexibility):
class DictOverride:
    __slots__ = ("dict_base", "items_override", "values_orig")
    _sentinel = object()

    def __init__(self, dict_base, dict_override):
        sentinel = self._sentinel
        self.items_override = tuple(dict_override.items())
        self.values_orig = [
            dict_base.get(key, sentinel)
            for key, _ in self.items_override
        ]
        self.dict_base = dict_base

    def __enter__(self):
        dict_base = self.dict_base
        for key, value in self.items_override:
            dict_base[key] = value

    def __exit__(self, _type, _value, _traceback):
        sentinel = self._sentinel
        dict_base = self.dict_base
        for (key, value), value_orig in zip(
            self.items_override,
            self.values_orig,
        ):
            if value_orig is sentinel:
                dict_base.pop(key)
            else:
                dict_base[key] = value_orig
# Context manager test case
dct_test = {"eggs": "soft", "coconut": "hard"}
print("Original:", dct_test)
with DictOverride(dct_test, {"eggs": "hard"}):
    print("Override:", dct_test)
print("Original:", dct_test, "(again)")
Which outputs:
Original: {'eggs': 'soft', 'coconut': 'hard'}
Override: {'eggs': 'hard', 'coconut': 'hard'}
Original: {'eggs': 'soft', 'coconut': 'hard'} (again)

Nested dictionary that acts as defaultdict when setting items but not when getting items

I want to implement a dict-like data structure that has the following properties:
from collections import UserDict

class TestDict(UserDict):
    pass

test_dict = TestDict()

# Create empty dictionaries at 'level_1' and 'level_2' and insert 'Hello' at the 'level_3' key.
test_dict['level_1']['level_2']['level_3'] = 'Hello'

>>> test_dict
{
    'level_1': {
        'level_2': {
            'level_3': 'Hello'
        }
    }
}

# However, this should not return an empty dictionary but raise a KeyError.
>>> test_dict['unknown_key']
KeyError: 'unknown_key'
The problem, to my knowledge, is that Python does not know whether __getitem__ is being called in the context of setting an item (the first example) or in the context of getting an item (the second example).
I have already seen Python `defaultdict`: Use default when setting, but not when getting, but I do not think that this question is a duplicate, or that it answers my question.
Please let me know if you have any ideas.
Thanks in advance.
EDIT:
It is possible to achieve something similar using:
from typing import Union
from collections import UserDict

def set_nested_item(dict_in: Union[dict, "TestDict"], value, keys):
    for i, key in enumerate(keys):
        is_last = i == (len(keys) - 1)
        if is_last:
            dict_in[key] = value
        else:
            if key not in dict_in:
                dict_in[key] = {}
            else:
                if not isinstance(dict_in[key], (dict, TestDict)):
                    dict_in[key] = {}
            dict_in[key] = set_nested_item(dict_in[key], value, keys[(i + 1):])
        return dict_in

class TestDict(UserDict):
    def __init__(self):
        super().__init__()

    def __setitem__(self, key, value):
        if isinstance(key, list):
            self.update(set_nested_item(self, value, key))
        else:
            super().__setitem__(key, value)

test_dict = TestDict()
test_dict[['level_1', 'level_2', 'level_3']] = 'Hello'
>>> test_dict
{
    'level_1': {
        'level_2': {
            'level_3': 'Hello'
        }
    }
}
It's impossible.
test_dict['level_1']['level_2']['level_3'] = 'Hello'
is semantically equivalent to:
temp1 = test_dict['level_1'] # Should this line fail?
temp1['level_2']['level_3'] = 'Hello'
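To illustrate, here is a minimal sketch (my own illustration, not from the question) using ordinary __missing__-based autovivification: the same lookup path services both reads and writes, so it cannot create the nested dict for one and raise KeyError for the other.
class AutoVivify(dict):
    def __missing__(self, key):
        # Runs on every failed lookup, whether or not the result is
        # about to be assigned into.
        value = self[key] = AutoVivify()
        return value

d = AutoVivify()
d['level_1']['level_2']['level_3'] = 'Hello'
print(d)                 # {'level_1': {'level_2': {'level_3': 'Hello'}}}
print(d['unknown_key'])  # {} -- silently created instead of raising KeyError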
But... if determined to implement it anyway, you could inspect the Python stack to grab/parse the calling line of code, and then vary the behaviour depending on whether the calling line of code contains an assignment! Unfortunately, sometimes the calling code isn't available in the stack trace (e.g. when called interactively), in which case you need to work with Python bytecode.
import dis
import inspect
from collections import UserDict

def get_opcodes(code_object, lineno):
    """Utility function to extract Python VM opcodes for line of code"""
    line_ops = []
    instructions = dis.get_instructions(code_object).__iter__()
    for instruction in instructions:
        if instruction.starts_line == lineno:
            # found start of our line
            line_ops.append(instruction.opcode)
            break
    for instruction in instructions:
        if not instruction.starts_line:
            line_ops.append(instruction.opcode)
        else:
            # start of next line
            break
    return line_ops

class TestDict(UserDict):
    def __getitem__(self, key):
        try:
            return super().__getitem__(key)
        except KeyError:
            # inspect the stack to get calling line of code
            frame = inspect.stack()[1].frame
            opcodes = get_opcodes(frame.f_code, frame.f_lineno)
            # STORE_SUBSCR is Python opcode for TOS1[TOS] = TOS2
            if dis.opmap['STORE_SUBSCR'] in opcodes:
                # calling line of code contains a dict/array assignment
                default = TestDict()
                super().__setitem__(key, default)
                return default
            else:
                raise

test_dict = TestDict()
test_dict['level_1']['level_2']['level_3'] = 'Hello'
print(test_dict)
# {'level_1': {'level_2': {'level_3': 'Hello'}}}

test_dict['unknown_key']
# KeyError: 'unknown_key'
The above is just a partial solution. It can still be fooled if there are other dictionary/array assignments on the same line, e.g. other['key'] = test_dict['unknown_key']. A more complete solution would need to actually parse the line of code to figure out where the variable occurs in the assignment.
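A quick sketch of that caveat in action, assuming the TestDict defined above:
# Another subscript assignment on the same line also produces a
# STORE_SUBSCR opcode, so the missing key is silently created instead
# of raising KeyError.
other = {}
other['key'] = test_dict['unknown_key']
print(test_dict)  # now also contains 'unknown_key': {}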

How to load into Globals in Python from a function

I have a function which loads data into a dictionary.
But how can I load that dictionary into globals() from inside a function?
Doing it inside a function is the important part, since it is easy to do at script level.
import os
import os.path as osp
import tarfile
import cPickle
import numpy as np

def load237(filename):
    filename = osp.abspath(filename)
    old_cwd = os.getcwdu()
    os.chdir(osp.dirname(filename))
    error_message = None
    try:
        tar = tarfile.open(filename, "r")
        tar.extractall()
        pickle_filename = osp.splitext(filename)[0] + '.pickle'
        data = cPickle.load(file(pickle_filename))
        saved_arrays = {}
        if load_array is not None:
            try:
                saved_arrays = data.pop('__saved_arrays__')
                for (name, index), fname in saved_arrays.iteritems():
                    arr = np.load(osp.join(osp.dirname(filename), fname))
                    if index is None:
                        data[name] = arr
                    elif isinstance(data[name], dict):
                        data[name][index] = arr
                    else:
                        data[name].insert(index, arr)
            except KeyError:
                pass
        for fname in [pickle_filename] + [fn for fn in saved_arrays.itervalues()]:
            os.remove(fname)
    except (EOFError, ValueError), error:
        error_message = unicode(error)
    os.chdir(old_cwd)
    return data, error_message
This one does not work (globals() is local to the module the function is defined in, not to the caller):
def load_inmemory(fpath):
    globals().update(load237(fpath)[0])
You should really be storing those names on an object held in a single global rather than as separate global variables. But you asked how to do it, so here is how:
Using the approach from Getting corresponding module from function, with a for loop and setattr (modules do not support dictionary operations), the function can be written as:
import sys

def load_inmemory(fpath):
    module = sys.modules[load_inmemory.__module__]
    for k, v in load237(fpath)[0].items():
        setattr(module, k, v)

load_inmemory(fpath)
print x
I tested the following:
import sys

def func():
    module = sys.modules[func.__module__]
    for k, v in {'x': 4}.items():
        setattr(module, k, v)

func()
print x
Prints 4. Tested in Python 2.7.3.
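Alternatively, if you want the names to land in the caller's module rather than in the module that defines the helper, here is a sketch using sys._getframe (documented as a CPython implementation detail) together with the load237 function from the question:
import sys

def load_inmemory(fpath):
    # Update the globals of the *calling* module, not this module's.
    caller_globals = sys._getframe(1).f_globals
    caller_globals.update(load237(fpath)[0])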

How to access ALL global variables in a function?

I'm trying to mimic the matlab load and save functions. I'm following this thread: Shelve Code gives KeyError
It is smart. However, if I write that code in a separate module, and try to import that module and invoke that function, then it can't access the global variables.
Specifically, I write a happy.py and have the functions inside:
def save(filename='tmp', globals_=None):
    if globals_ is None:
        globals_ = globals()
    import shelve
    my_shelf = shelve.open(filename, 'n')
    for key, value in globals_.items():
        if not key.startswith('__'):
            try:
                my_shelf[key] = value
            except Exception:
                print('ERROR shelving: "%s"' % key)
            else:
                print('shelved: "%s"' % key)
    my_shelf.close()

def load(filename='tmp', globals_=None):
    import shelve
    my_shelf = shelve.open(filename)
    for key in my_shelf:
        globals()[key] = my_shelf[key]
    my_shelf.close()
and when I try
a = 1
b = 2
happy.save()
it does not save a and b.
Is this because globals() would not give the objects outside the module? How can I do what I want to do then?
The following will work as a separate module:
import shelve
import sys
import types

EXCLUDED_TYPES = (types.ModuleType,)  # Not everything can be shelved.

def save(filename='tmp', globals_=None):
    if globals_ is None:
        globals_ = sys._getframe(1).f_globals  # Caller's globals.
    with shelve.open(filename, 'n') as my_shelf:
        for key, value in globals_.items():
            if not (key.startswith('__') or isinstance(value, EXCLUDED_TYPES)):
                try:
                    my_shelf[key] = value
                except Exception as e:
                    print('ERROR shelving: "%s"' % key, 'Exception:', e)
                else:
                    print('shelved: "%s"' % key)

def load(filename='tmp', globals_=None):
    if globals_ is None:
        globals_ = sys._getframe(1).f_globals  # Caller's globals.
    with shelve.open(filename) as my_shelf:
        for key in my_shelf:
            globals_[key] = my_shelf[key]
            print('unshelved: "%s"' % key)
Generally speaking, I don't think it's a good idea for a function to create global variables like this. Also note that load() might silently change existing values in the caller's namespace.
You can't easily save all global namespaces, since there's one associated with every module loaded, in addition to __main__'s. If you really want to do that, it might be possible to do so by iterating through the contents of sys.modules.
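If you did want to go down that road, a rough sketch of walking sys.modules might look like the following; whether the collected values can actually be shelved is a separate question:
import sys
import types

def iter_module_globals():
    # Yield (module_name, variable_name, value) for every non-dunder,
    # non-module global of every currently loaded module.
    for module_name, module in list(sys.modules.items()):
        if module is None:
            continue
        for name, value in vars(module).items():
            if not name.startswith('__') and not isinstance(value, types.ModuleType):
                yield module_name, name, value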
You can use inspect to look at the stack. This silly (and poorly named) function that I've defined seems to do an OK job of picking up the global variables from the calling namespace, although I haven't tested it extensively. I am also unsure whether it will work across different Python implementations. (I mention this because the inspect.currentframe function is definitely implementation-dependent.) It seems to work fine with CPython, for what it's worth.
import inspect

def save(globals=None):
    if globals is None:
        frames = inspect.stack()
        caller_frame = frames[-1][0]
        globals = dict((k, v) for (k, v) in caller_frame.f_globals.items() if not k.startswith('__'))
    return globals

if __name__ == "__main__":
    a = 1
    b = 2
    print save()
I don't have a problem with this code when it is pasted into a console:
>>> def save(filename='tmp', globals_=None):
...     import shelve
...     globals_ = globals_ or globals()
...     my_shelf = shelve.open(filename, 'n')
...     for key, value in globals_.items():
...         if not key.startswith('__'):
...             try:
...                 my_shelf[key] = value
...             except Exception:
...                 print('ERROR shelving: "%s"' % key)
...             else:
...                 print('shelved: "%s"' % key)
...     my_shelf.close()
...
>>> def load(filename='tmp', globals_=None):
...     import shelve
...     my_shelf = shelve.open(filename)
...     for key in my_shelf:
...         globals()[key] = my_shelf[key]
...     my_shelf.close()
...
>>> a, b = 1, 2
>>> save()
shelved: "load"
shelved: "a"
shelved: "b"
shelved: "save"
And then:
>>> def save(filename='tmp', globals_=None):
...     import shelve
...     globals_ = globals_ or globals()
...     my_shelf = shelve.open(filename, 'n')
...     for key, value in globals_.items():
...         if not key.startswith('__'):
...             try:
...                 my_shelf[key] = value
...             except Exception:
...                 print('ERROR shelving: "%s"' % key)
...             else:
...                 print('shelved: "%s"' % key)
...     my_shelf.close()
...
>>> def load(filename='tmp', globals_=None):
...     import shelve
...     my_shelf = shelve.open(filename)
...     for key in my_shelf:
...         globals()[key] = my_shelf[key]
...     my_shelf.close()
...
>>> load()
>>> a, b
(1, 2)
But it is a bit odd when you use it as a module:
>>> from happy import *
>>> a, b
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
NameError: name 'a' is not defined
>>> load()
>>> a, b
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
NameError: name 'a' is not defined
>>> happy.a
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
NameError: name 'happy' is not defined
>>> from happy import *
>>> a, b
(1, 2)
Is there enough here for you to have a work-around?
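For what it's worth, using a plain import instead of a star import makes the round trip less surprising, since the unshelved names live in happy's own namespace and can be read as attributes. A sketch of the expected session:
>>> import happy
>>> happy.load()
>>> happy.a, happy.b
(1, 2)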

Get last function's call arguments from traceback?

Can I get the parameters of the last function called in traceback? How?
I want to make a catcher for standard errors to make readable code, yet provide detailed information to user.
In the following example I want GET_PARAMS to return a tuple of the parameters supplied to os.chown. Examining the inspect module, as advised by Alex Martelli, I couldn't find that.
def catch_errors(fn):
    def decorator(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except (IOError, OSError):
            msg = sys.exc_info()[2].tb_frame.f_locals['error_message']
            quit(msg.format(SEQUENCE_OF_PARAMETERS_OF_THE_LAST_FUNCTION_CALLED)
                 + '\nError #{0[0]}: {0[1]}'.format(sys.exc_info()[1].args), 1)
    return decorator

@catch_errors
def do_your_job():
    error_message = 'Can\'t change folder ownership \'{0}\' (uid:{1}, gid:{2})'
    os.chown('/root', 1000, 1000)  # note that params aren't named vars.

if __name__ == '__main__' and os.getenv('USERNAME') != 'root':
    do_your_job()
(Thanks to Jim Robert for the decorator)
For such inspection tasks, always think first of module inspect in the standard library. Here, inspect.getargvalues gives you the argument values given a frame, and inspect.getinnerframes gives you the frames of interest from a traceback object.
Here is an example of such a function and some problems that you can't get around:
import sys

def get_params(tb):
    while tb.tb_next:
        tb = tb.tb_next
    frame = tb.tb_frame
    code = frame.f_code
    argcount = code.co_argcount
    if code.co_flags & 4:  # *args
        argcount += 1
    if code.co_flags & 8:  # **kwargs
        argcount += 1
    names = code.co_varnames[:argcount]
    params = {}
    for name in names:
        params[name] = frame.f_locals.get(name, '<deleted>')
    return params

def f(a, b=2, c=3, *d, **e):
    del c
    c = 4
    e['g'] = 6
    assert False

try:
    f(1, f=5)
except:
    print get_params(sys.exc_info()[2])
The output is:
{'a': 1, 'c': 4, 'b': 2, 'e': {'g': 6, 'f': 5}, 'd': ()}
I deliberately didn't use inspect.getinnerframes(), to show another way to get the needed frame. Although it simplifies things a bit, it also does some extra work that you don't need and is relatively slow (inspect.getinnerframes() reads the source file for every module in the traceback; this is not important for a single debugging call, but could be an issue in other cases).
The problem with using a decorator for what you're trying to achieve is that the frame the exception handler gets is do_your_job()'s, not os.listdir()'s, os.makedirs()'s or os.chown()'s. So the information you'll be printing out is the arguments to do_your_job(). In order to get the behavior I think you intend, you would have to decorate all the library functions you're calling.
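For example, here is a minimal sketch (the names are mine, not from the question) of wrapping a single library call so the handler sees that call's own arguments:
import functools
import os
import sys

def report_args(fn):
    # Wrap one library function; on IOError/OSError, report the exact
    # arguments passed to *this* call before re-raising.
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except (IOError, OSError):
            sys.stderr.write('%s failed with args=%r kwargs=%r\n'
                             % (fn.__name__, args, kwargs))
            raise
    return wrapper

chown = report_args(os.chown)
# chown('/root', 1000, 1000)  # on failure: chown failed with args=('/root', 1000, 1000) kwargs={}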
