Hidden calls to __getattribute__ - python

I'm trying to write a mixin that increments a counter every time a public attribute is read. My implementation below seems to have a hidden increment that I cannot find. From the commented-out print statements I can identify it as occurring in the self._read_count += 1 statement. What am I misunderstanding?
class CounterMixin:
    def __init__(self):
        self._read_count = 0;
        super().__init__();

    def _inc_read_count(self):
        # print(f'Pre-inc: {self._read_count}');
        self._read_count += 1;
        # print(f'Post-inc: {self._read_count}');

    @property
    def read_count(self):
        self._inc_read_count();
        return self._read_count;

    def __getattribute__(self, attr):
        if attr[0] != '_':
            # print(f'Counting {attr}');
            self._inc_read_count();
        else:
            # print(f'Not counting {attr}');
            pass;
        return super().__getattribute__(attr);

class Bar(CounterMixin):
    pass

foo = Bar();
print(foo.read_count);
print('---');
foo.x = 1;
print(foo.read_count);
print('---');
_ = foo.x;
print(foo.read_count);
Expected output:
1
---
2
---
4
Actual output:
2
---
4
---
7

You're incrementing self._read_count twice every time you access read_count: once in __getattribute__ (because 'read_count' does not start with an underscore) and once more in the property getter, which calls _inc_read_count() itself.
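One possible fix (a minimal sketch, assuming the access to read_count itself should be counted, which is what the expected output implies) is to increment only in __getattribute__ and let the property simply report the value:

class CounterMixin:
    def __init__(self):
        self._read_count = 0
        super().__init__()

    @property
    def read_count(self):
        # No increment here: __getattribute__ has already counted this access.
        return self._read_count

    def __getattribute__(self, attr):
        if not attr.startswith('_'):
            # Reads of "private" names (like _read_count below) are not counted,
            # so this line does not trigger extra increments.
            self._read_count += 1
        return super().__getattribute__(attr)

class Bar(CounterMixin):
    pass

foo = Bar()
print(foo.read_count)  # 1
foo.x = 1
print(foo.read_count)  # 2
_ = foo.x
print(foo.read_count)  # 4

This reproduces the expected 1, 2, 4 sequence from the question.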

Related

Python - Log the things a string has previously been

If this is my code:
x = 1
x = 2
x = 3
How can I “log” the things x has been and print them? If my explanation was dumb, then here’s what I expect:
>>> # Code to print the things x has been
1, 2, 3
>>>
How can I achieve this?
Since assignment rebinds the name (x in your example) to a new object rather than changing it in place, it is not possible to do exactly what you want. However, you could create an object whose value can be changed and whose history is remembered, for example like this:
#!/usr/bin/env python3

class ValueWithHistory():
    def __init__(self):
        self.history = []
        self._value = None

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, new_value):
        self.history.append(new_value)
        self._value = new_value

    def get_history(self):
        return self.history

    def clear_history(self):
        self.history.clear()

def main():
    test = ValueWithHistory()
    test.value = 1
    print(test.value)
    test.value = 2
    print(test.value)
    test.value = 3
    print(test.value)
    print(test.get_history())

if __name__ == '__main__':
    main()
This prints:
1
2
3
[1, 2, 3]
Of course, you could also use a set instead of a list to only remember each unique value once, for example.
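For instance, a set-based variant could look like the sketch below (the name UniqueValueWithHistory is made up for this example; note that a plain set does not preserve the order in which values were seen):

class UniqueValueWithHistory:
    def __init__(self):
        self.history = set()
        self._value = None

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, new_value):
        self.history.add(new_value)  # duplicates are ignored automatically
        self._value = new_value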
You can have a second thread observe the string and record the changes:

from threading import Thread

my_string = ''  # the observed string
log = []        # history of values

def string_watcher():
    global my_string
    global log
    temp = ''
    while True:
        if my_string != temp:
            log.append(my_string)
            temp = my_string

t = Thread(target=string_watcher, daemon=True)
t.start()

This checks whether the string my_string was changed and, if so, appends the new value to the list log. With this you should be able to run

print(log)

at any moment during the runtime.

While evaluating length of a list that is defined as a class

For example, I got a class named stack,
class stack:  # Will be used for probable files!
    def __init__(self):
        self.data = []

    def add(self, element):
        self.data.append(element)

    def number_of_elements(self):
        return len(self.data)

    def stackType(self):
        if self.number_of_elements == 0:
            return 0
        elif self.number_of_elements == 1:
            return 1
        else:
            return -1
I then do this:
foo = stack()
print foo.stackType()
I get -1, however I was expecting a return of 1.
Why is that, and how can I deal with it?
That's because you did not call the method self.number_of_elements; you merely tested whether it equalled 0 or 1.
Modify your code to actually call the method using this syntax: self.number_of_elements() [notice the use of () to call the method]:
def stackType(self):
    if self.number_of_elements() == 0:
        return 0
    elif self.number_of_elements() == 1:
        return 1
    else:
        return -1
You could also have written it like this:
def stack_type(self):
    n = self.number_of_elements()
    return -1 if n > 1 else n
which would be an improvement because number_of_elements() will be called once only. In your code the method could be called twice. I renamed the function to be consistent with the Python method naming conventions set out in PEP8.
Because self.number_of_elements is not the same as self.number_of_elements()!
The former is a reference to the function, the latter is a call to the function actually calculating the length of your stack.
self.number_of_elements is a bound method object, not a number, so it never equals 0 or 1.
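To see this concretely, a small sketch using the stack class from the question (Python 3 print syntax):

foo = stack()
print(foo.number_of_elements)       # <bound method stack.number_of_elements of ...>
print(foo.number_of_elements == 0)  # False: a bound method never equals an integer
print(foo.number_of_elements())     # 0: calling it returns the actual length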

Python patch existing class

I'm an experienced PHP/Ruby developer but right now I'm fighting Python and I really need your help.
I need to patch an existing class by adding a static attribute and overriding a static method to use it.
Let me show you example:
class Test():
    @staticmethod
    def generate():
        return 10
But in my test suite I need to get the following class:
class Test():
    count = 1

    @staticmethod
    def generate():
        if Test.count < 3:
            Test.count += 1
            return 1
        else:
            return 10
So the basic idea is to get 10 only on the 3rd call of the "generate" function.
My first approach was to use the "patch" technique, so I did:
def my_generate_hash():
    return 99

with patch.object(Test, 'generate', staticmethod(my_generate_hash)):
    print "Got %d" % check_hash()
Buuut I was unable to add the attribute "count" and use it in the overriding method.
Second thought was to "Mock" something! So..
mock = MagicMock(Test)
mock.count = 1

def my_generate_hash():
    if Test2.count < 3:
        Test2.count += 1
        return 1
    else:
        return 10

mock.generate = my_generate_hash

with patch('__main__.Test', mock):
    print Test.generate()
But in the real case I have other methods in the "Test" class, so it won't work.
I'm stuck. Any help will be appreciated!
It might be simpler to subclass the original Test class for use in your tests:
class Test(object):
    @staticmethod
    def generate():
        return 10

class PatchedTest(Test):
    count = 1

    @staticmethod
    def generate():
        if PatchedTest.count < 3:
            PatchedTest.count += 1
            return 1
        else:
            return 10
The replacement function could also be done in two somewhat better ways, both of which should make it a lot easier to patch the Test class in the same way you were trying in your question:
Use a @classmethod, allowing the function to access the class it's assigned to:
class PatchedTest(Test):
    count = 1

    @classmethod
    def generate(cls):
        if cls.count < 3:
            cls.count += 1
            return 1
        else:
            return 10
Use a generator instead: each time the next value is requested, execution continues where it last left off. However, this will only work if you iterate over the function's result:
def alternative_generate():
    yield 1
    yield 1
    yield 10
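For example (a sketch that is not part of the original answer, assuming Python 3's unittest.mock), the generator can be driven with next() inside the same patch.object pattern used in the question:

from unittest.mock import patch

class Test:
    @staticmethod
    def generate():
        return 10

def alternative_generate():
    yield 1
    yield 1
    yield 10

gen = alternative_generate()

# Each patched call pulls the next value from the generator; after the third call
# next(gen) would raise StopIteration, so this assumes exactly three calls.
with patch.object(Test, 'generate', staticmethod(lambda: next(gen))):
    print(Test.generate())  # 1
    print(Test.generate())  # 1
    print(Test.generate())  # 10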
Looks like it can be done in a different way.
self.count = 0

def generate():
    if self.count < 3:
        self.count += 1
        return 10
    else:
        return 99

with patch.object(Test, 'generate', generate):
    self.assertEqual(Test.generate(), 10)
    self.assertEqual(Test.generate(), 10)
    self.assertEqual(Test.generate(), 10)
    self.assertEqual(Test.generate(), 99)
    self.assertEqual(Test.generate(), 99)

Why does my synchronous job code not work

I have written the code segment below to invoke a function every 2 seconds. But it does not seem to work. Why?
from threading import Timer

class A:
    value = None

    def AX(self):
        value = 12
        obj = B()
        Timer(1, obj.BY, [self.value]).start()

class B:
    def BY(self, value):
        print "refreshed :", value

if __name__ == '__main__':
    obj = A()
    obj.AX()
You need to either set value on self, or use the local variable as the argument:
def AX(self):
    self.value = 12
    obj = B()
    Timer(1, obj.BY, [self.value]).start()
or:
def AX(self):
    value = 12
    obj = B()
    Timer(1, obj.BY, [value]).start()
This prints:
>>> A().AX()
>>> refreshed : 12
Note that the Timer() instance will only call obj.BY once; it doesn't repeatedly call the function.
You need to reset the timer every time BY is called to do that:
class B:
    def BY(self, value):
        print "refreshed :", value
        Timer(2, self.BY, [value]).start()
which will create a loop. If you need that loop to terminate at some point, you'll need to test for that condition separately, in BY:
class B:
    somecondition = False

    def BY(self, value):
        print "refreshed :", value
        if self.somecondition:
            return  # do not set the timer again
        Timer(2, self.BY, [value]).start()
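Putting the two fixes together, a minimal runnable sketch might look like this (Python 3 syntax, unlike the Python 2 prints above; the remaining counter is an invented stop condition so the example terminates, not part of the question):

from threading import Timer

class B:
    def __init__(self, repeats=3):
        self.remaining = repeats  # invented stop condition

    def BY(self, value):
        print("refreshed:", value)
        self.remaining -= 1
        if self.remaining > 0:
            # Re-arm the timer so BY keeps firing every 2 seconds until done.
            Timer(2, self.BY, [value]).start()

class A:
    def __init__(self):
        self.value = None

    def AX(self):
        self.value = 12  # store on the instance instead of a throwaway local
        Timer(2, B().BY, [self.value]).start()

if __name__ == '__main__':
    A().AX()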

Lazy data-flow (spreadsheet like) properties with dependencies in Python

My problem is the following: I have some Python classes whose properties are derived from other properties; those derived values should be cached once they are calculated, and the cached results should be invalidated each time the base properties change.
I could do it manually, but it seems quite difficult to maintain if the number of properties grows. So I would like to have something like Makefile rules inside my objects to automatically keep track of what needs to be recalculated.
The desired syntax and behaviour should be something like this:
# this does dirty magic, like generating the reverse dependency graph,
# and preparing the setters that invalidate the cached values
@dataflow_class
class Test(object):

    def calc_a(self):
        return self.b + self.c

    def calc_c(self):
        return self.d * 2

    a = managed_property(calculate=calc_a, depends_on=('b', 'c'))
    b = managed_property(default=0)
    c = managed_property(calculate=calc_c, depends_on=('d',))
    d = managed_property(default=0)

t = Test()

print t.a
# a has not been initialized, so it calls calc_a
# gets b value
# c has not been initialized, so it calls calc_c
# c value is calculated and stored in t.__c
# a value is calculated and stored in t.__a

t.b = 1
# invalidates the calculated value stored in self.__a

print t.a
# a has been invalidated, so it calls calc_a
# gets b value
# gets c value, from t.__c
# a value is calculated and stored in t.__a

print t.a
# gets value from t.__a

t.d = 2
# invalidates the calculated values stored in t.__a and t.__c
So, is there something like this already available or should I start implementing my own? In the second case, suggestions are welcome :-)
Here, this should do the trick. The descriptor mechanism (through which the language implements "property") is more than enough for what you want. If the code below does not work in some corner cases, just write me.
class DependentProperty(object):
    def __init__(self, calculate=None, default=None, depends_on=()):
        # "name" and "dependence_tree" properties are attributes
        # set up by the metaclass of the owner class
        if calculate:
            self.calculate = calculate
        else:
            self.default = default
        self.depends_on = set(depends_on)

    def __get__(self, instance, owner):
        if hasattr(self, "default"):
            return self.default
        if not hasattr(instance, "_" + self.name):
            setattr(instance, "_" + self.name,
                    self.calculate(instance, getattr(instance, "_" + self.name + "_last_value")))
        return getattr(instance, "_" + self.name)

    def __set__(self, instance, value):
        setattr(instance, "_" + self.name + "_last_value", value)
        setattr(instance, "_" + self.name, self.calculate(instance, value))
        for attr in self.dependence_tree[self.name]:
            delattr(instance, attr)

    def __delete__(self, instance):
        try:
            delattr(instance, "_" + self.name)
        except AttributeError:
            pass

def assemble_tree(name, dict_, all_deps=None):
    if all_deps is None:
        all_deps = set()
    for dependance in dict_[name].depends_on:
        all_deps.add(dependance)
        assemble_tree(dependance, dict_, all_deps)
    return all_deps

def invert_tree(tree):
    new_tree = {}
    for key, val in tree.items():
        for dependence in val:
            if dependence not in new_tree:
                new_tree[dependence] = set()
            new_tree[dependence].add(key)
    return new_tree

class DependenceMeta(type):
    def __new__(cls, name, bases, dict_):
        dependence_tree = {}
        properties = []
        for key, val in dict_.items():
            if not isinstance(val, DependentProperty):
                continue
            val.name = key
            val.dependence_tree = dependence_tree
            dependence_tree[key] = set()
            properties.append(val)
        inverted_tree = {}
        for property in properties:
            inverted_tree[property.name] = assemble_tree(property.name, dict_)
        dependence_tree.update(invert_tree(inverted_tree))
        return type.__new__(cls, name, bases, dict_)

if __name__ == "__main__":
    # Example and visual test:
    class Bla:
        __metaclass__ = DependenceMeta

        def calc_b(self, x):
            print "Calculating b"
            return x + self.a

        def calc_c(self, x):
            print "Calculating c"
            return x + self.b

        a = DependentProperty(default=10)
        b = DependentProperty(depends_on=("a",), calculate=calc_b)
        c = DependentProperty(depends_on=("b",), calculate=calc_c)

    bla = Bla()
    bla.b = 5
    bla.c = 10
    print bla.a, bla.b, bla.c
    bla.b = 10
    print bla.b
    print bla.c
I would like to have something like Makefile rules
then use one! You may consider this model:
one rule = one python file
one result = one *.data file
the pipe is implemented as a makefile or with another dependency analysis tool (cmake, scons)
The hardware test team in our company uses such a framework for intensive exploratory tests:
you can integrate other languages and tools easily
you get a stable and proven solution
computations may be distributed over multiple CPUs/computers
you track dependencies on values and rules
debugging of intermediate values is easy
The (big) downside to this method is that you have to give up the Python import keyword, because it creates an implicit (and untracked) dependency (there are workarounds for this).
import collections

sentinel = object()

class ManagedProperty(object):
    '''
    If deptree = {'a':set('b','c')}, then ManagedProperties `b` and
    `c` will be reset whenever `a` is modified.
    '''
    def __init__(self, property_name, calculate=None, depends_on=tuple(),
                 default=sentinel):
        self.property_name = property_name
        self.private_name = '_' + property_name
        self.calculate = calculate
        self.depends_on = depends_on
        self.default = default

    def __get__(self, obj, objtype):
        if obj is None:
            # Allows getattr(cls, mprop) to return the ManagedProperty instance
            return self
        try:
            return getattr(obj, self.private_name)
        except AttributeError:
            result = (getattr(obj, self.calculate)()
                      if self.default is sentinel else self.default)
            setattr(obj, self.private_name, result)
            return result

    def __set__(self, obj, value):
        # obj._dependencies is defined by @register
        map(obj.__delattr__, getattr(obj, '_dependencies').get(self.property_name, tuple()))
        setattr(obj, self.private_name, value)

    def __delete__(self, obj):
        if hasattr(obj, self.private_name):
            delattr(obj, self.private_name)

def register(*mproperties):
    def flatten_dependencies(name, deptree, all_deps=None):
        '''
        A deptree such as {'c': set(['a']), 'd': set(['c'])} means
        'a' depends on 'c' and 'c' depends on 'd'.
        Given such a deptree, flatten_dependencies('d', deptree) returns the set
        of all property_names that depend on 'd' (i.e. set(['a','c']) in the
        above case).
        '''
        if all_deps is None:
            all_deps = set()
        for dep in deptree.get(name, tuple()):
            all_deps.add(dep)
            flatten_dependencies(dep, deptree, all_deps)
        return all_deps

    def classdecorator(cls):
        deptree = collections.defaultdict(set)
        for mprop in mproperties:
            setattr(cls, mprop.property_name, mprop)
        # Find all ManagedProperties in dir(cls). Note that some of these may be
        # inherited from bases of cls; they may not be listed in mproperties.
        # Doing it this way allows ManagedProperties to be overridden by subclasses.
        for propname in dir(cls):
            mprop = getattr(cls, propname)
            if not isinstance(mprop, ManagedProperty):
                continue
            for underlying_prop in mprop.depends_on:
                deptree[underlying_prop].add(mprop.property_name)
        # Flatten the dependency tree so no recursion is necessary. If one were
        # to use recursion instead, then a naive algorithm would make duplicate
        # calls to __delete__. By flattening the tree, there are no duplicate
        # calls to __delete__.
        dependencies = {key: flatten_dependencies(key, deptree)
                        for key in deptree.keys()}
        setattr(cls, '_dependencies', dependencies)
        return cls
    return classdecorator
These are the unit tests I used to verify its behavior.
if __name__ == "__main__":
    import unittest
    import sys

    def count(meth):
        def wrapper(self, *args):
            countname = meth.func_name + '_count'
            setattr(self, countname, getattr(self, countname, 0) + 1)
            return meth(self, *args)
        return wrapper

    class Test(unittest.TestCase):
        def setUp(self):
            @register(
                ManagedProperty('d', default=0),
                ManagedProperty('b', default=0),
                ManagedProperty('c', calculate='calc_c', depends_on=('d',)),
                ManagedProperty('a', calculate='calc_a', depends_on=('b', 'c')))
            class Foo(object):
                @count
                def calc_a(self):
                    return self.b + self.c

                @count
                def calc_c(self):
                    return self.d * 2

            @register(ManagedProperty('c', calculate='calc_c', depends_on=('b',)),
                      ManagedProperty('a', calculate='calc_a', depends_on=('b', 'c')))
            class Bar(Foo):
                @count
                def calc_c(self):
                    return self.b * 3

            self.Foo = Foo
            self.Bar = Bar
            self.foo = Foo()
            self.foo2 = Foo()
            self.bar = Bar()

        def test_two_instances(self):
            self.foo.b = 1
            self.assertEqual(self.foo.a, 1)
            self.assertEqual(self.foo.b, 1)
            self.assertEqual(self.foo.c, 0)
            self.assertEqual(self.foo.d, 0)
            self.assertEqual(self.foo2.a, 0)
            self.assertEqual(self.foo2.b, 0)
            self.assertEqual(self.foo2.c, 0)
            self.assertEqual(self.foo2.d, 0)

        def test_initialization(self):
            self.assertEqual(self.foo.a, 0)
            self.assertEqual(self.foo.calc_a_count, 1)
            self.assertEqual(self.foo.a, 0)
            self.assertEqual(self.foo.calc_a_count, 1)
            self.assertEqual(self.foo.b, 0)
            self.assertEqual(self.foo.c, 0)
            self.assertEqual(self.foo.d, 0)
            self.assertEqual(self.bar.a, 0)
            self.assertEqual(self.bar.b, 0)
            self.assertEqual(self.bar.c, 0)
            self.assertEqual(self.bar.d, 0)

        def test_dependence(self):
            self.assertEqual(self.Foo._dependencies,
                             {'c': set(['a']), 'b': set(['a']), 'd': set(['a', 'c'])})
            self.assertEqual(self.Bar._dependencies,
                             {'c': set(['a']), 'b': set(['a', 'c'])})

        def test_setting_property_updates_dependent(self):
            self.assertEqual(self.foo.a, 0)
            self.assertEqual(self.foo.calc_a_count, 1)
            self.foo.b = 1
            # invalidates the calculated value stored in foo.a
            self.assertEqual(self.foo.a, 1)
            self.assertEqual(self.foo.calc_a_count, 2)
            self.assertEqual(self.foo.b, 1)
            self.assertEqual(self.foo.c, 0)
            self.assertEqual(self.foo.d, 0)
            self.foo.d = 2
            # invalidates the calculated values stored in foo.a and foo.c
            self.assertEqual(self.foo.a, 5)
            self.assertEqual(self.foo.calc_a_count, 3)
            self.assertEqual(self.foo.b, 1)
            self.assertEqual(self.foo.c, 4)
            self.assertEqual(self.foo.d, 2)
            self.assertEqual(self.bar.a, 0)
            self.assertEqual(self.bar.calc_a_count, 1)
            self.assertEqual(self.bar.b, 0)
            self.assertEqual(self.bar.c, 0)
            self.assertEqual(self.bar.calc_c_count, 1)
            self.assertEqual(self.bar.d, 0)
            self.bar.b = 2
            self.assertEqual(self.bar.a, 8)
            self.assertEqual(self.bar.calc_a_count, 2)
            self.assertEqual(self.bar.b, 2)
            self.assertEqual(self.bar.c, 6)
            self.assertEqual(self.bar.calc_c_count, 2)
            self.assertEqual(self.bar.d, 0)
            self.bar.d = 2
            self.assertEqual(self.bar.a, 8)
            self.assertEqual(self.bar.calc_a_count, 2)
            self.assertEqual(self.bar.b, 2)
            self.assertEqual(self.bar.c, 6)
            self.assertEqual(self.bar.calc_c_count, 2)
            self.assertEqual(self.bar.d, 2)

    sys.argv.insert(1, '--verbose')
    unittest.main(argv=sys.argv)
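For completeness, here is a condensed usage sketch of the @register/ManagedProperty API above, mirroring the syntax the question asked for (Python 2 prints, to match the answer's code; this sketch is not part of the original answer):

@register(ManagedProperty('b', default=0),
          ManagedProperty('d', default=0),
          ManagedProperty('c', calculate='calc_c', depends_on=('d',)),
          ManagedProperty('a', calculate='calc_a', depends_on=('b', 'c')))
class Test(object):
    def calc_a(self):
        return self.b + self.c

    def calc_c(self):
        return self.d * 2

t = Test()
print t.a  # 0: calc_a runs and the result is cached in t._a
t.b = 1    # invalidates the cached t._a
print t.a  # 1: calc_a runs again
t.d = 2    # invalidates t._a and t._c
print t.a  # 5: calc_c and calc_a both run again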
