I have a model where I want to use a class method to set the default for a property:
class Organisation(db.Model):
    name = db.StringProperty()
    code = db.StringProperty(default=generate_code())

    @classmethod
    def generate_code(cls):
        import random
        codeChars = 'ABCDEF0123456789'
        while True:  # Make sure code is unique
            code = random.choice(codeChars) + random.choice(codeChars) + \
                   random.choice(codeChars) + random.choice(codeChars)
            if not cls.all().filter('code = ', code).get(keys_only=True):
                return code
But I get a NameError:
NameError: name 'generate_code' is not defined
How can I access generate_code()?
As I said in a comment, I would use a classmethod to act as a factory and always create your entity through there. It keeps things simpler and avoids nasty hooks to get the behaviour you want.
Here is a quick example.
class Organisation(db.Model):
    name = db.StringProperty()
    code = db.StringProperty()

    @classmethod
    def generate_code(cls):
        import random
        codeChars = 'ABCDEF0123456789'
        while True:  # Make sure code is unique
            code = random.choice(codeChars) + random.choice(codeChars) + \
                   random.choice(codeChars) + random.choice(codeChars)
            if not cls.all().filter('code = ', code).get(keys_only=True):
                return code

    @classmethod
    def make_organisation(cls, *args, **kwargs):
        new_org = cls(*args, **kwargs)
        new_org.code = cls.generate_code()
        return new_org
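Usage might then look like this (a minimal sketch; the name value and the put() call are just for illustration):

org = Organisation.make_organisation(name='Acme')
org.put()  # save the entity; code was filled in by the factory
print org.code  # e.g. 'A3F0'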
import random

class Test(object):
    def __new__(cls):
        cls.my_attr = cls.get_code()
        return super(Test, cls).__new__(cls)

    @classmethod
    def get_code(cls):
        return random.randrange(10)

t = Test()
print t.my_attr
You need to specify the class name: Organisation.generate_code()
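For example, once the class is defined you can call the classmethod through the class name and assign the result yourself (a minimal sketch; generating the code explicitly before put() is my own illustration, not the questioner's default= approach):

org = Organisation(name='Acme')
org.code = Organisation.generate_code()  # call via the class name
org.put()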
Related
I want to take advantage of polymorphism where the class that should be used for a certain action is defined in the database. Basically I want something that can work like this:
class Example:
    @staticmethod
    def do_something():
        pass  # Does something

s = "Example"
# Do magic here that makes s reference the class instead of just being a string
s.do_something()
Obviously, there probably needs to be some code to check it actually is a class that's defined and all that.
You can check if the class name exists in globals():
class Example:
    @staticmethod
    def do_something():
        print("Hello from ExampleClass")

li = ["Example", "WrongExample"]

for cls in li:
    if cls in globals():
        print(f"Calling class {cls}:")
        globals()[cls]().do_something()
    else:
        print(f"Class {cls} does not exist.")

# Calling class Example:
# Hello from ExampleClass
# Class WrongExample does not exist.
Say I have 2 different implementations of a class
class ParentA:
    def initialize(self):
        pass

    def some_event(self):
        pass

    def order(self, value):
        # handle order in some way for Parent A
        pass

class ParentB:
    def initialize(self):
        pass

    def some_event(self):
        pass

    def order(self, value):
        # handle order in another way for Parent B
        pass
How can I dynamically let some 3rd class inherit from either ParentA or ParentB based on something like this?
class MyCode:
    def initialize(self):
        self.initial_value = 1

    def some_event(self):
        # handle event
        order(self.initial_value)

# let MyCode inherit from ParentA and run
run(my_code, ParentA)
Simply store the class-object in a variable (in the example below, it is named base), and use the variable in the base-class-spec of your class statement.
def get_my_code(base):
    class MyCode(base):
        def initialize(self):
            ...
    return MyCode

my_code = get_my_code(ParentA)
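Usage might then look like this (a small sketch; the initialize() call just shows that the returned class behaves like a subclass of the chosen parent):

MyCodeA = get_my_code(ParentA)   # a class object whose base is ParentA
instance_a = MyCodeA()
instance_a.initialize()

MyCodeB = get_my_code(ParentB)   # same factory, different base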
Also, you can use the type builtin. As a callable, it takes the arguments name, bases, dct (in its simplest form).
def initialize(self):
    self.initial_value = 1

def some_event(self):
    # handle event
    order(self.initial_value)

subclass_body_dict = {
    "initialize": initialize,
    "some_event": some_event,
}

base_class = ParentA  # or ParentB, as you wish

MyCode = type("MyCode", (base_class,), subclass_body_dict)
This is more explicit than shx2's solution, but still, I like his way better.
PS: of course, you don't have to store base_class or subclass_body_dict; you can build those values in the type() call like:
MyCode = type("MyCode", (ParentA, ), {
"initialize": initialize,
"some_event": some_event
})
Just as a quick copy-and-paste-ready snippet, I've taken the memoisation suggestion from the comments on shx2's answer and created this (memoized with a created_classes dict attribute, so that successive calls with the same base classes return the same class):
class ParentA:
    val = "ParentA"

class ParentB:
    val = "ParentB"

class DynamicClassCreator():
    def __init__(self):
        self.created_classes = {}

    def __call__(self, *bases):
        rep = ",".join([i.__name__ for i in bases])
        if rep in self.created_classes:
            return self.created_classes[rep]

        class MyCode(*bases):
            pass

        self.created_classes[rep] = MyCode
        return MyCode

creator = DynamicClassCreator()
instance1 = creator(ParentA, ParentB)()
print(instance1.val)  # prints "ParentA"
instance2 = creator(ParentB, ParentA)()
print(instance2.val)  # prints "ParentB"
If you wanted to get fancy you could even make DynamicClassCreator a Singleton: https://stackoverflow.com/a/7346105/5122790
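If you did want that, a minimal sketch might look like the following (the MetaSingleton name and the condensed __call__ body are my own, assuming the metaclass recipe from the linked answer):

class MetaSingleton(type):
    # Hypothetical helper: caches a single instance per class.
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in MetaSingleton._instances:
            MetaSingleton._instances[cls] = super().__call__(*args, **kwargs)
        return MetaSingleton._instances[cls]

class DynamicClassCreator(metaclass=MetaSingleton):
    def __init__(self):
        self.created_classes = {}

    def __call__(self, *bases):
        rep = ",".join(i.__name__ for i in bases)
        if rep not in self.created_classes:
            # type() here is equivalent to the class statement used above
            self.created_classes[rep] = type("MyCode", bases, {})
        return self.created_classes[rep]

# Every DynamicClassCreator() call now returns the same creator,
# so the created_classes cache is shared across the program.
assert DynamicClassCreator() is DynamicClassCreator()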
As an alternative to Chris's answer implementing the memoisation suggestion for shx2's answer, I'd prefer to use a memoize decorator (the end result is still a class, but it's clearer to me that the function is the interface). I also use setdefault to simplify adding to the memo dict, and use the bases tuple itself as the key instead of converting the names to strings, which simplifies the code to:
class Memoize:
    def __init__(self, f):
        self.f = f
        self.memo = {}

    def __call__(self, *args):
        return self.memo.setdefault(args, self.f(*args))

class ParentA:
    def initialize(self):
        pass

class ParentB:
    def initialize(self):
        pass

@Memoize
def get_my_code(base):
    class MyCode(base):
        def initialize(self):
            pass
    return MyCode

a1 = get_my_code(ParentA)
a2 = get_my_code(ParentA)
b1 = get_my_code(ParentB)
print(a1 is a2)  # True
print(a1 is b1)  # False
(Not a good example, as the code provided doesn't actually do anything other than override the parent class's initialize method...)
I just started learning Python and I don't quite understand where the problem in this code is. I have a base class Proband with two methods, and I want to create a subclass Gesunder that overrides the attributes idn, artefakte.
import scipy.io

class Proband:
    def __init__(self, idn, artefakte):
        self.__idn = idn
        self.artefakte = artefakte

    def getData(self):
        path = 'C:\matlab\EKGnurbild_von Proband' + str(self.idn)
        return scipy.io.loadmat(path)

    def __eq__(self, neueProband):
        return self.idn == neueProband.idn and self.artefakte == neueProband.artefakte

class Gesunder(Proband):
    def __init__(self, idn, artefakte, sportler):
        super().__init__(self, idn, artefakte)
        self.__sportler = sportler

hans = Gesunder(2, 3, 3)
You have two problems in your code. In Python 2:
1. super() takes two arguments: the class and the instance
2. in order to use super(), the base class must inherit from object
So your code becomes:
import scipy.io

class Proband(object):
    def __init__(self, idn, artefakte):
        self.__idn = idn
        self.artefakte = artefakte

    def getData(self):
        path = 'C:\matlab\EKGnurbild_von Proband' + str(self.idn)
        return scipy.io.loadmat(path)

    def __eq__(self, neueProband):
        return self.idn == neueProband.idn and self.artefakte == neueProband.artefakte

class Gesunder(Proband):
    def __init__(self, idn, artefakte, sportler):
        super(Gesunder, self).__init__(idn, artefakte)
        self.__sportler = sportler

hans = Gesunder(2, 3, 3)
Note that the call to super(Gesunder, self).__init__ does not take self as the first argument.
In Python 2, super() on its own is invalid. Instead, you must use super(ClassName, self).
super(Gesunder, self).__init__(idn, artefakte)
The super() call should be modified to:
super(Gesunder, self).__init__(idn, artefakte)
Is there a way to mock just the default parameters of a class constructor? For example, if I have this class:
class A(object):
    def __init__(self, details='example'):
        self.details = details
Is there a way to mock just the default value of the details argument, e.g. to details='test'?
Surely this is simplest:
class TestA(A):
    def __init__(self, details='test'):
        super(TestA, self).__init__(details)
If you're not able to use TestA without changing code elsewhere, then you could try something a bit more direct:
>>> A().details
'example'
>>> A.__old_init = A.__init__
>>> A.__init__ = lambda self, details='test': self.__old_init(details)
>>> A().details
'test'
If you're willing to go that far, though, why not do it the tidy way?
class A(object):
    _DEFAULT_DETAILS = 'example'

    def __init__(self, details=None):
        if details is None:
            self.details = self._DEFAULT_DETAILS
        else:
            self.details = details
Now you can override the default value without resorting to any trickery:
A._DEFAULT_DETAILS = 'test'
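In a test you could also patch the class attribute temporarily instead of assigning to it permanently (a hedged sketch using unittest.mock; patch.object restores the original value when the block exits):

from unittest import mock

with mock.patch.object(A, '_DEFAULT_DETAILS', 'test'):
    assert A().details == 'test'    # default overridden inside the patch
assert A().details == 'example'     # original default restored afterwards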
Would a new mock class do?
class Amock(A):
    def __init__(self, details='newdefault'):
        super(Amock, self).__init__(details=details)
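A quick sanity check of that subclass (my own illustration, not part of the answer):

m = Amock()
print(m.details)  # prints 'newdefault'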
OK, I know this is mentioned in the manual, and probably has to do with side_effect and/or return_value, but a simple, direct example will help me immensely.
I have:
class ClassToPatch():
    def __init__(self, *args):
        _do_some_init_stuff()

    def some_func(self):
        _do_stuff()

class UUT():
    def __init__(self, *args):
        resource_1 = ClassToPatch()
        resource_2 = ClassToPatch()
Now, I want to unit test the UUT class, and mock the ClassToPatch. Knowing the UUT class will instantiate exactly two ClassToPatch objects, I want the Mock framework to return a new Mock object for each instantiation, so I can later assert calls on each separately.
How do I achieve this using the @patch decorator in a test case? Namely, how do I fix the following code sample?
class TestCase1(unittest.TestCase):
    @patch('classToPatch.ClassToPatch', autospec=True)
    def test_1(self, mock1, mock2):
        _assert_stuff()
Here's a quick'n'dirty example to get you going:
import mock
import unittest

class ClassToPatch():
    def __init__(self, *args):
        pass

    def some_func(self):
        return id(self)

class UUT():
    def __init__(self, *args):
        resource_1 = ClassToPatch()
        resource_2 = ClassToPatch()
        self.test_property = (resource_1.some_func(), resource_2.some_func())

class TestCase1(unittest.TestCase):
    @mock.patch('__main__.ClassToPatch', autospec=True)
    def test_1(self, mock1):
        ctpMocks = [mock.Mock(), mock.Mock()]
        ctpMocks[0].some_func.return_value = "funky"
        ctpMocks[1].some_func.return_value = "monkey"
        mock1.side_effect = ctpMocks

        u = UUT()
        self.assertEqual(u.test_property, ("funky", "monkey"))

if __name__ == '__main__':
    unittest.main()
I've added test_property to UUT so that the unit test does something useful. Without the mock, test_property should be a tuple containing the ids of the two ClassToPatch instances; with the mock it should be the tuple ("funky", "monkey").
I've used the side_effect property of the mock object so that a different mock instance is returned on each call to ClassToPatch in the UUT initialiser.
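Since the question also asks about asserting calls on each instance separately, the test could be extended after u = UUT() with checks like these (my own addition, not part of the original answer):

ctpMocks[0].some_func.assert_called_once_with()      # first resource used exactly once
ctpMocks[1].some_func.assert_called_once_with()      # second resource used exactly once
mock1.assert_has_calls([mock.call(), mock.call()])   # ClassToPatch instantiated twice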
Hope this helps.
Edit: Oh, by the way, when I run the unit test I get:
.
----------------------------------------------------------------------
Ran 1 test in 0.004s
OK
Here is another version, which is more generic and handles any number of created instances:
class TestUUT:
    def test_init(self, mocker):
        class MockedClassToPatchMeta(type):
            static_instance = mocker.MagicMock(spec=ClassToPatch)

            def __getattr__(cls, key):
                return MockedClassToPatchMeta.static_instance.__getattr__(key)

        class MockedClassToPatch(metaclass=MockedClassToPatchMeta):
            original_cls = ClassToPatch
            instances = []

            def __new__(cls, *args, **kwargs):
                MockedClassToPatch.instances.append(
                    mocker.MagicMock(spec=MockedClassToPatch.original_cls))
                MockedClassToPatch.instances[-1].__class__ = MockedClassToPatch
                return MockedClassToPatch.instances[-1]

        mocker.patch(__name__ + '.ClassToPatch', new=MockedClassToPatch)

        UUT()

        # since your original code created two instances
        assert 2 == len(MockedClassToPatch.instances)
If you need more thorough validation for each instance you can access MockedClassToPatch.instances[0] or MockedClassToPatch.instances[1].
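For example, per-instance checks might look like this (a sketch that assumes the UUT from the earlier answer, where some_func is called on each resource):

first, second = MockedClassToPatch.instances
first.some_func.assert_called_once_with()
second.some_func.assert_called_once_with()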
I've also created a helper library to generate the metaclass boilerplate for me. To generate the needed code for your example I wrote:
print(PytestMocker(mocked=ClassToPatch, name=__name__).mock_classes().mock_classes_static().generate())