Use alias for a property in Python at module level - python

I have the following in Python 2.7:
class MyClass(object):
    ...
    @property
    def my_attr(self):
        ...
    @my_attr.setter
    def my_attr(self, value):
        ...
I use getter/setter so that I can do some logic in there.
Then I can call:
import myModule
test = myModule.MyClass()
test.my_attr = 9
I would like to use an alias at the module level so that I can do something like that:
import myModule
myModule.my_attr = 9
Is there a way to do that?

Yes, absolutely; the key is that modules are themselves objects. First you need to make MyClass subclass the module type:
from types import ModuleType
class MyClass(ModuleType):
...
Then you replace the current module with an instance of MyClass:
import sys
sys.modules[__name__] = MyClass(__name__)
Note that this can be pretty confusing to static analysers and to people reading your code.

To provide a special handling for some attributes of a module, you could define a proxy class that does the special handling and delegates the rest to the original module object:
"""
>>> import property_on_module
>>> property_on_module.attr = 1
set attr property
>>> property_on_module.attr
get attr property
1
"""
import sys
class Module(object):
    """Proxy that wraps a module object and adds an ``attr`` property.

    Attribute reads that the proxy does not handle itself fall through to
    the wrapped module via __getattr__; ``attr`` is intercepted by the
    property pair below so module-level get/set runs custom logic.
    """

    def __init__(self, module):
        # Name-mangled to _Module__module so it cannot collide with
        # attributes delegated from the wrapped module.
        self.__module = module

    def __getattr__(self, name):
        # Called only when normal attribute lookup fails: delegate to
        # the wrapped module.
        return getattr(self.__module, name)

    @property
    def attr(self):
        print("get attr property")
        return self.__attr

    @attr.setter
    def attr(self, value):
        print("set attr property")
        self.__attr = value
if __name__ == "__main__":  # run as a script: execute the doctests above
    import doctest

    sys.exit(doctest.testmod().failed)
else:
    # Normal import: replace this module object in sys.modules with a
    # `Module` proxy so `attr` gets property handling at module level.
    sys.modules[__name__] = Module(sys.modules[__name__])
__getattr__ might not be enough; you could define __getattribute__/__setattr__ in this case e.g., quickdraw.py (based on sh.py).

Related

Check class instance passed to another module is of type class

How to check type of class instance object passed from another module. Does it require from main import MyClass? Or can I just generally check object is of type "(any) class instance"?
# main.py
#########
import sub
class MyClass():
    """Minimal example class; each instance carries one attribute ``z``."""

    def __init__(self):
        self.z = 1


a = MyClass()
# Sanity checks: both the exact-type test and isinstance hold here,
# because `a` was built from this very class object.
assert type(a) == MyClass
assert isinstance(a, MyClass) == True
b = sub.fun(a)
# sub.py
########
# NOTE(review): this is the question's intentionally broken snippet, kept
# verbatim -- `class` is a reserved keyword, so `isinstance(i, class)` is
# invalid Python (the question reports the resulting error below).
def fun(i):
if isinstance(i, class): # general instead of __main__.MyClass
return i.__dict__
The above yields
NameError: name 'class' is not defined
Maybe it is not a good design and code shall be explicit rather than converting each class to dict?
I want to convert each class instance passed to fun to dictionary
Try the below
class Foo:
    """Sample class with a single instance attribute ``x``."""

    def __init__(self, x):
        self.x = x


class Bar:
    """Second sample class, structurally identical to ``Foo``."""

    def __init__(self, x):
        self.x = x


def fun(instance):
    """Return the instance's attribute dict, if it has one.

    Works for any object carrying a ``__dict__`` (i.e. most plain class
    instances), regardless of which class it came from; returns None for
    objects without one.
    """
    if hasattr(instance, '__dict__'):
        return instance.__dict__


print(fun(Foo(56)))
print(fun(Bar("56")))
output
{'x': 56}
{'x': '56'}
Yes, python needs to know which class you are checking because you can have two different classes with the same name in different files.
In file sub.py you have to write:
from main import MyClass


def fun(i):
    """Return MyClass instances as a dict; implicitly None otherwise."""
    if isinstance(i, MyClass):
        return i.__dict__
But note that if you import fun into main.py, you will get a circular import — be careful.
# sub.py
########
def fun(i):
if isinstance(i, class): # <---- class is keyword in Python, you cannot use it
# like this. You can use as type(class)
return i.__dict__

pytest: unittest error object has no attribute 'assert_called_once_with

I am writing unit test in pytest and getting error on assert_called_once_with.
I tried to use the same approach as shown in the pytest documentation, but it seems I am missing something.
# Class which I am trying to mock. (./src/Trading.py)
class BaseTrade:
    """Base trade: stores the instrument name."""

    def __init__(self, name):
        self.name = name


class Trade(BaseTrade):
    """Concrete trade with a balance calculation."""

    def __init__(self, name):
        BaseTrade.__init__(self, name)

    def get_balance(self, value):
        # do calculation and return some value
        # for demo purpose hard-coding it
        return value * 10
#unit test (./unitest/test_test.py
import mock  # NOTE(review): third-party package; stdlib unittest.mock also works
import unittest
import sys

sys.path.append("../src")
import Trading


class TestTradeClass(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        # NOTE(review): written with `self` in the question; classmethods
        # conventionally name this parameter `cls` (it receives the class).
        self.expected_balance = 100

    @classmethod
    def tearDownClass(self):
        pass

    def test_trade(self):
        # BUG (the one the question asks about): patching with `new=`
        # replaces get_balance with a plain lambda, which has no
        # assert_called_once_with -- hence the AttributeError reported
        # below. The accepted fix is to patch with side_effect= instead.
        with mock.patch.object(Trading.Trade, 'get_balance', new=lambda self, x: (x * 10)) as mock_method:
            obj = Trading.Trade("AAPL")
            value = obj.get_balance(10)
            assert value == 100
            mock_method.assert_called_once_with(100)
Error on mock_method.assert_called_once_with(100)
AttributeError: 'function' object has no attribute 'assert_called_once_with'
I'm now of the belief you want side_effect. How is this? One file, assume test.py:
#!/usr/bin/env python
import unittest
from unittest import mock  # stdlib replacement for the third-party `mock`


class BaseTrade:
    """Base trade: stores the instrument name."""

    def __init__(self, name):
        self.name = name


class Trade(BaseTrade):
    def __init__(self, name):
        BaseTrade.__init__(self, name)

    def get_balance(self, value):
        # do calculation and return some value
        # for demo purpose hard-coding it
        return value * 10


class TestTradeClass(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.expected_balance = 100

    def test_trade(self):
        # Without mock
        obj = Trade("AAPL")
        value = obj.get_balance(10)
        assert value == 100

        # With Trade.get_balance patched: side_effect wraps the lambda in a
        # MagicMock, so call assertions like assert_called_once_with work
        # (unlike patching with new=, which installs a bare function).
        with mock.patch.object(
            Trade, 'get_balance', side_effect=lambda value: value * 11
        ) as mock_method:
            obj = Trade("AAPL")
            value = obj.get_balance(10)
            assert value == 110
            mock_method.assert_called_once_with(10)


if __name__ == "__main__":
    unittest.main()
chmod +x test.py
./test.py
Output:
.
----------------------------------------------------------------------
Ran 1 test in 0.000s
OK
Explanation:
Use side_effect instead of new
Combined to one file to make it easier
removing Trading.Trade
#classmethod to use cls and not self.
mock_method.assert_called_once_with(10), as side_effect cares about the value passed via obj.get_balance(10) and exists to alter the output.
Closer? If not can you clarify what you're trying to mock?
It's not easy to tell, but if Trading.Trade.get_method() is actually a vanilla function and not a method - you may need unittest.mock.create_autospec()
Are any of these of assistance?
'function' object has no attribute 'assert_called_once_with'
Python3 mock replace function with another function
It's not clear what Trading.Trade is.
If by chance, Trading is a class that has a self.trade = Trade(...) inside, your question would be substantially different. You'd need to get in deeper, patching Trade.get_method, then. You'd likely want to import Trade from the same module class Trading uses it (e.g. from .trading import Trading, Trade) - not from where Trade is declared from - then patch the Trade.get_method.

How do I populate a factory without knowing the classes in advance?

I would like to create a generalize factory that builds any classes defined in a script. However I'm having trouble finding all the classes.
I have factory.py:
import sys
import inspect
# NOTE(review): this is the question's non-working attempt, kept verbatim.
# Known problems (which the answer below sidesteps entirely):
#   * `types` is used but never imported.
#   * `imports()` yields module *names* (strings), not module objects.
#   * `[sys.modules[__name__]] + [imports()]` puts the generator object
#     itself into the list, so `classes()` inspects the generator -- which
#     is why "generator" shows up in the reported output.
#   * `factory_dict[...]` inside __init__ is an unqualified name; it would
#     need `self.factory_dict` or `Factory.factory_dict` to work.
def imports():
for name, val in globals().items():
if isinstance(val, types.ModuleType):
yield val.__name__
def classes():
for mod in [sys.modules[__name__]] + [imports()]:
for name, obj in inspect.getmembers(mod):
if inspect.isclass(obj):
yield obj
class Factory:
factory_dict = {}
def __init__(self):
for xx in classes():
factory_dict[xx.__name__] = xx
print(xx.__name__)
I've defined some classes in myclasses.py
class A:...
class B:...
And I have my scratch.py:
import factory
import myclasses
class foo:...
ff = factory.Factory()
The result is this:
/Users/raysalemi/.conda/envs/untitled/bin/python scratch.py"
Factory
generator
I don't see the classes defined in myclasses.py or the class defined in scratch.py. Clearly the factory is working only on what it sees in its own module.
Is there a way to create a factory that could access all the currently defined classes?
It turns out the solution here was not to inspect the script and find the classes, but instead to have the classes add themselves to the factory_dict when they are defined.
The above changes like this:
factory.py now defines a metaclass that populates the dict:
class Factory:
    # Registry of every class created through factory_class, keyed by
    # class name; populated as a side effect of class definition.
    factory_dict = {}


class factory_class(type):
    """Metaclass that registers each new class in Factory.factory_dict."""

    def __init__(cls, name, bases, clsdict):
        Factory.factory_dict[cls.__name__] = cls
        super().__init__(name, bases, clsdict)


class factory_base(metaclass=factory_class):
    """Inherit from this to be auto-registered with the factory."""
    pass
Classes that want to be available in the factory extend factory_base:
from factory import factory_base
class A(factory_base):...
class B(factory_base):...
The result is simpler code in scratch.py:
import factory
import myclasses
class foo:...
ff = factory.Factory()
print(factory.Factory.factory_dict)
With this output:
/python scratch.py"
{'factory_base': <class 'factory.factory_base'>, 'A': <class 'myclasses.A'>, 'B': <class 'myclasses.B'>}

Imported class populating properties from global variables

I have a class property that I want to populate at runtime, but I don't want to pass the value. What I am doing right now is:
weird_class.py
class WeirdClass:
prop = NotImplementedError
runtime_file.py
from weird_class import WeirdClass
PROP = 'somevalue'
class WeirdClassChild(WeirdClass):
prop = PROP
This works, but whenever I create a WeirdClassChild, I have to set the prop value, which is irritating.
What I would like is for WeirdClassChild to automatically pick up the PROP global variable without me specifically telling it to.
I would like to do something like:
class WeirdClass:
prop = __global_namespace__.PROP
Is this possible in python?
You can use inspection to determine the context from which your class is instantiated:
# class definition
import inspect
class Weird:
    """On construction, pulls PROP out of the *caller's* module globals.

    Fragile by design (as the answer itself warns): a global named PROP
    must exist wherever the instantiation happens, or this raises KeyError.
    """

    def __init__(self):
        caller_frame = inspect.stack()[1]  # FrameInfo record of the caller
        caller_module = caller_frame[0]    # the frame object itself
        self.prop = caller_module.f_globals['PROP']
# instantiation
PROP = 555
x = Weird()
print(x.prop)
=> 555
I wouldn't necessarily recommend it, but if you have a good reason to do this...
You may be able to use metaclasses:
#!/usr/bin/env python3
PROP = "only for subclasses"


class _WierdMeta(type):
    # Both hooks below set `prop`; in practice keep only one of
    # __init__ / __new__ -- either alone is sufficient.

    # `cls` is an instance of the class type that _WierdMeta creates
    def __init__(cls, name, bases, dct):
        if bases:  # only subclasses: the base class has an empty bases tuple
            cls.prop = PROP
        super().__init__(name, bases, dct)

    # `cls` here is _WierdMeta itself
    def __new__(cls, name, bases, dct):
        class_type = super().__new__(cls, name, bases, dct)
        if bases:
            class_type.prop = PROP
        # this return value becomes the `cls` passed to __init__ above
        return class_type


class WierdBase(metaclass=_WierdMeta):
    """Base class."""
    prop = "base"


class WierdChild(WierdBase):
    pass


wb = WierdBase()
wc = WierdChild()
print(wb.prop)  # prints 'base'
print(wc.prop)  # prints 'only for subclasses'
It appears that as of Python 3.6, you can do it using __init_subclass__.
class WierdBase():
    """Base class: subclasses get `prop` replaced via __init_subclass__."""

    prop = "base"

    # Python 3.6+: runs once for every subclass as it is defined.
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        if cls is not WierdBase:
            # PROP must exist in this module's globals (see the metaclass
            # snippet above, which defines it).
            cls.prop = PROP
Based on your last comment of how you use this, why not add another base class?
from weird_class import WeirdClass

PROP = 'somevalue'


class WeirdClassChildBase(WeirdClass):
    """Shared intermediate base, so `prop` is assigned in exactly one place."""
    prop = PROP


class WeirdClassChild_1(WeirdClassChildBase):
    pass


class WeirdClassChild_2(WeirdClassChildBase):
    pass
...
I would split this into three files and follow the approach explained here: https://instructobit.com/tutorial/108/How-to-share-global-variables-between-files-in-Python :
globals.py - here you initialize the value of the PROP
weird_class.py - here you should import the globals.py and use the PROP value
start.py - here you can test the weird class, but before importing its module you should import globals.py and call some initialization method
The globals.py file will aggregate all the global default values. In the start.py script, you should first import globals and initialize it.
# start.py -- entry point: globals must be imported and initialized
# *before* weird_class is imported, because weird_class reads globals.PROP
# at class-definition time.
import globals
globals.initialize()
from weird_class import WeirdClass
c = WeirdClass()
print(c.prop)
In your weird_class file you can access the variable from globals.py provided you have imported it:
import globals  # NOTE(review): project module; shadows the built-in globals()


class WeirdClass:
    # Read at class-definition time, so globals.initialize() must have
    # run before this module is imported.
    prop = globals.PROP
and finally the globals.py can look like:
def initialize():
    """Populate the module-global PROP with its default value.

    Must be called once, before any module that reads globals.PROP at
    import time is imported. The `global` statement makes PROP a module
    attribute of this globals module.
    """
    global PROP
    PROP = "my global value"
The key part here is to declare global PROP before setting an initial value. The global keyword will make the variable global in the module globals.
Having global variables, constants and all magic values in a single, central place scoped with the module is often an advantage.

Python class member lazy initialization

I would like to know what is the python way of initializing a class member but only when accessing it, if accessed.
I tried the code below and it is working but is there something simpler than that?
class MyClass(object):
    # Cache slot: None means "not fetched yet".
    _MY_DATA = None

    @staticmethod
    def _retrieve_my_data():
        my_data = ...  # costly database call
        return my_data

    @classmethod
    def get_my_data(cls):
        # Lazy: perform the expensive retrieval only on first access,
        # then reuse the cached class attribute.
        if cls._MY_DATA is None:
            cls._MY_DATA = MyClass._retrieve_my_data()
        return cls._MY_DATA
You could use a #property on the metaclass instead:
class MyMetaClass(type):
    @property
    def my_data(cls):
        # Computed on first access, then cached on the class itself.
        if getattr(cls, '_MY_DATA', None) is None:
            my_data = ...  # costly database call
            cls._MY_DATA = my_data
        return cls._MY_DATA


class MyClass(metaclass=MyMetaClass):
    # Body intentionally empty: `my_data` lives on the metaclass, so it is
    # reachable as MyClass.my_data (class-level) but not on instances.
    pass
This makes my_data an attribute on the class, so the expensive database call is postponed until you try to access MyClass.my_data. The result of the database call is cached by storing it in MyClass._MY_DATA, the call is only made once for the class.
For Python 2, use class MyClass(object): and add a __metaclass__ = MyMetaClass attribute in the class definition body to attach the metaclass.
Demo:
>>> class MyMetaClass(type):
... #property
... def my_data(cls):
... if getattr(cls, '_MY_DATA', None) is None:
... print("costly database call executing")
... my_data = 'bar'
... cls._MY_DATA = my_data
... return cls._MY_DATA
...
>>> class MyClass(metaclass=MyMetaClass):
... pass
...
>>> MyClass.my_data
costly database call executing
'bar'
>>> MyClass.my_data
'bar'
This works because a data descriptor like property is looked up on the parent type of an object; for classes that's type, and type can be extended by using metaclasses.
This answer is for a typical instance attribute/method only, not for a class attribute/classmethod, or staticmethod.
For Python 3.8+, how about using the cached_property decorator? It memoizes.
from functools import cached_property


class MyClass:
    @cached_property
    def my_lazy_attr(self):
        # Computed once per instance on first access; cached_property then
        # stores the result in the instance __dict__.
        print("Initializing and caching attribute, once per class instance.")
        return 7**7**8
For Python 3.2+, how about using both property and lru_cache decorators? The latter memoizes.
from functools import lru_cache


class MyClass:
    @property
    @lru_cache()
    def my_lazy_attr(self):
        # lru_cache keys on `self`, so the value is computed once per
        # instance (caveat: the cache also keeps each instance alive).
        print("Initializing and caching attribute, once per class instance.")
        return 7**7**8
Credit: answer by Maxime R.
Another approach to make the code cleaner is to write a wrapper function that does the desired logic:
def memoize(f):
    """Decorator caching f's first result; later calls return it unchanged.

    Caveat (noted in the surrounding answer): the cache ignores the
    arguments -- every call after the first returns the first result,
    whatever new arguments are passed.
    """
    def wrapped(*args, **kwargs):
        if hasattr(wrapped, '_cached_val'):
            return wrapped._cached_val
        result = f(*args, **kwargs)
        wrapped._cached_val = result
        return result
    return wrapped
You can use it as follows:
@memoize
def expensive_function():
    # Simulate a slow computation: visible message plus a 1-second delay.
    print("Computing expensive function...")
    import time
    time.sleep(1)
    return 400


print(expensive_function())
print(expensive_function())
print(expensive_function())
Which outputs:
Computing expensive function...
400
400
400
Now your classmethod would look as follows, for example:
class MyClass(object):
    @classmethod
    @memoize
    def retrieve_data(cls):
        # Simulated costly DB call; memoize ensures it runs only once.
        print("Computing data")
        import time
        time.sleep(1)  # costly DB call
        my_data = 40
        return my_data


print(MyClass.retrieve_data())
print(MyClass.retrieve_data())
print(MyClass.retrieve_data())
Output:
Computing data
40
40
40
Note that this will cache just one value for any set of arguments to the function, so if you want to compute different values depending on input values, you'll have to make memoize a bit more complicated.
Consider the pip-installable Dickens package which is available for Python 3.5+. It has a descriptors package which provides the relevant cachedproperty and cachedclassproperty decorators, the usage of which is shown in the example below. It seems to work as expected.
from descriptors import cachedproperty, classproperty, cachedclassproperty


class MyClass:
    FOO = 'A'

    def __init__(self):
        self.bar = 'B'

    @cachedproperty
    def my_cached_instance_attr(self):
        print('Initializing and caching attribute, once per class instance.')
        return self.bar * 2

    @cachedclassproperty
    def my_cached_class_attr(cls):
        print('Initializing and caching attribute, once per class.')
        return cls.FOO * 3

    @classproperty
    def my_class_property(cls):
        print('Calculating attribute without caching.')
        return cls.FOO + 'C'
Ring gives lru_cache-like interface but working with any kind of descriptor supports: https://ring-cache.readthedocs.io/en/latest/quickstart.html#method-classmethod-staticmethod
class Page(object):
    (...)  # placeholder from the answer: rest of the class elided

    @ring.lru()
    @classmethod
    def class_content(cls):
        return cls.base_content

    @ring.lru()
    @staticmethod
    def example_dot_com():
        return requests.get('http://example.com').content
See the link for more details.

Categories