I need to get the list of model fields, i.e. turn this:
@instance.register
class Todo(Document):
    title = fields.StringField(required=True, default='Name')
    description = fields.StringField()
    created_at = fields.DateTimeField()
    created_by = fields.StringField()
    priority = fields.IntegerField()
to
[
    'title',
    'description',
    'created_at',
    'created_by',
    'priority'
]
So I have a function that returns the list of fields:
def get_class_properties(cls):
    attributes = inspect.getmembers(cls, lambda a: not (inspect.isroutine(a)))
    return [attr for attr in attributes if not (attr[0].startswith('__') and attr[0].endswith('__'))][1]
But using it gives me this error:
umongo.exceptions.NoDBDefinedError: init must be called to define a db
Usage:
properties=get_class_properties(Todo)
UPD
Here is my mongo initialization code:
async def mongo_client(app):
    conf = app["config"]["mongo"]
    client = AsyncIOMotorClient(host=conf["host"], port=conf["port"])
    db = client[conf["db"]]
    instance.init(db)
    await Todo.ensure_indexes()
    app["db_client"]: AsyncIOMotorClient = client
    app["db"] = db
    yield
    await app["db_client"].close()
This is a copy/paste of this answer from the author of this library:
As far as I remember, this exception is raised when you're trying to use
lazy clients without initializing them properly. Any lazy class of
uMongo expects that the used database will be specified before the
usage. Everything that you need is to specify the used database and
invoke the init method of your lazy instance, like this:
from motor.motor_asyncio import AsyncIOMotorClient
from umongo import MotorAsyncIOInstance
client = AsyncIOMotorClient("mongodb://user:password#host:port/")
client = client["test_database"]
lazy_umongo = MotorAsyncIOInstance()
lazy_umongo.init(client)
As an example you can look into the Auth/Auth microservice code, where
documents are defined and stored in files separate from their actual
usage. These files (documents.py and prepare_mongodb.py) can also serve
as examples to help you find a solution.
The trick was that
properties = get_class_properties(Todo)
is invoked earlier than
async def mongo_client(app):
The solution is to do things in the right order (see the comments in the code):
async def init_app(argv=None):
    app = web.Application(middlewares=[deserializer_middleware], logger=logger)
    app["config"] = config

    conf = app["config"]["mongo"]
    client = AsyncIOMotorClient(host=conf["host"], port=conf["port"])
    db = client[conf["db"]]
    instance.init(db)

    # Remove this line:
    # app.cleanup_ctx.append(mongo_client)
    app.cleanup_ctx.append(api_client)
    register_routes(app)
    return app
def register_routes(app: web.Application):
    # Use here:
    todo_resource = RestResource(
        entity='todo',
        factory=Todo,
        properties=get_class_properties(Todo)
    )
    todo_resource.register(app.router)
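As an aside (an assumption about umongo's API rather than part of the answer above): once instance.init(db) has run, the registered document should expose its schema, so the field names (plus the automatic id field) can also be read directly instead of going through inspect, e.g.:
# assumes instance.init(db) has already been called
properties = list(Todo.schema.fields.keys())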
I am developing a Django app (Django v3.2.10, pytest v7.0.1, pytest-django v4.5.2) which uses a cursor to perform raw queries against my secondary DB, my_db2. But when running tests, all the queries return empty results, as if they were running in parallel transactions.
My test file:
@pytest.mark.django_db(transaction=True, databases=['default', 'my_db2'])
class TestItems:
    def test_people(self):
        person1 = PeopleFactory()  # Adds 1 person to my_db2
        assert fetch_all_persons() == 1  # Fails: returns 0
My Factory:
class PeopleFactory(factory.django.DjangoModelFactory):
    id = factory.Sequence(lambda x: x + 1)
    name = factory.Faker('first_name')

    class Meta:
        model = People
My function:
from django.db import connections

def fetch_all_persons():
    with connections['my_db2'].cursor() as cursor:
        cursor.execute("SELECT * FROM Persons")
        return len(list(cursor.fetchall()))
According to the documentation, transaction=True should prevent this issue, but it doesn't. Does somebody know how to fix it?
Note: using the ORM is not an option; this is just a simplified example to illustrate the issue. The real queries are far more complex.
@hoefling and @Arkadiusz Łukasiewicz were right, I just needed to add the corresponding DB within the factories:
class PeopleFactory(factory.django.DjangoModelFactory):
    id = factory.Sequence(lambda x: x + 1)
    name = factory.Faker('first_name')

    class Meta:
        model = People
        database = 'my_db2'
Thank you both.
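For reference (an aside, not from the answer above): setting database in the factory's Meta makes factory_boy create the row through the manager pinned to that alias, roughly the same as doing it by hand with the ORM:
# roughly what the factory does under the hood with Meta.database = 'my_db2'
People.objects.using('my_db2').create(name='John')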
I am relatively new to Django, but not to Python. My model is trying to use a class (defined in a separate file) whose data comes from a REST API; the retrieved data is a nested dictionary. The code runs fine in plain Python, but when I try it in Django (makemigrations), I get an error:
File "c:\blah-blah\Clone_PR.py", line 20, in GetFoundOnSelectItems
values = self._issueEdit["fields"]["customfield_13940"]["allowedValues"]
TypeError: 'NoneType' object is not subscriptable
I tried using type hints, but that does not work either.
models.py
from dal import autocomplete
from django.db import models
from django.contrib import messages
from .Login import jlogin
from .Jira_Constants import ProductionServer, TestServer, StageServer
from .Clone_PR import Issue

jira = None
issue = Issue()

class ClonePrLogin(models.Model):
    username = models.CharField(max_length=30)
    password = models.CharField(max_length=30)

    @classmethod
    def LoginToJira(cls):
        global jira
        jira = jlogin(ProductionServer, cls.username, cls.password)

class PrEntry(models.Model):
    prToClone = models.CharField(max_length=20)

    @classmethod
    def GetIssueAndMeta(cls):
        global issue
        issue.initialize(jira, cls.prToClone)

class ClonePr(models.Model):
    issueKey = issue.issueKey
    issue.GetFoundOnSelectItems()
    foundOnList = issue.foundOnSelectItems
    foundOn = autocomplete.Select2ListChoiceField(choice_list=foundOnList)
Clone_PR.py
from typing import List, Dict

class Issue():
    def __init__(self):
        self.jiraInst = None
        self.issueKey = ''
        self._issue = None
        self._issueEdit = None
        # self._issueEdit = Dict[str, Dict[str, Dict[str, List[Dict[str, str]]]]]
        self.foundOnSelectItems = []

    def initialize(self, jira, prKey):
        self.jiraInst = jira
        self.issueKey = prKey
        self._issue = jira.issue(prKey)
        self._issueEdit = jira.editmeta(prKey)

    def GetFoundOnSelectItems(self):
        values = self._issueEdit["fields"]["customfield_13940"]["allowedValues"]
        items = [x["value"] for x in values]
        self.foundOnSelectItems = items
In Django, running makemigrations imports all of your modules. You said you're familiar with Python, so you should know that the declarations inside the class:
class ClonePr(models.Model):
    issueKey = issue.issueKey
    issue.GetFoundOnSelectItems()
    foundOnList = issue.foundOnSelectItems
    foundOn = autocomplete.Select2ListChoiceField(choice_list=foundOnList)
will run when the module is loaded. You're calling issue.GetFoundOnSelectItems() at that time, which in turn runs values = self._issueEdit["fields"]["customfield_13940"]["allowedValues"], except that self._issueEdit is still None, because the Issue instance was created at module level with issue = Issue() and initialize() has never been called.
I highly recommend you spend some time becoming more familiar with how Django starts up an app. The module-level call and the nested model declarations here are both antipatterns and may cause data issues in the future.
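A minimal sketch of one way to keep the module import-safe (the helper name get_found_on_choices is hypothetical, and how the choices are wired into the autocomplete field depends on your dal setup): fetch the Jira data in a function that runs at request time, not in the class body:
# hypothetical helper; call it from a view or a form's __init__, only after
# LoginToJira / GetIssueAndMeta have actually run
def get_found_on_choices():
    issue.GetFoundOnSelectItems()
    return issue.foundOnSelectItems

class ClonePr(models.Model):
    issueKey = models.CharField(max_length=20)  # persist data in model fields;
                                                # build the autocomplete field in a Form,
                                                # passing get_found_on_choices() at request time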
I am trying to mock a 3rd party library in setUp so I can pretend it works as expected in my code.
I was able to mock it locally, configuring all the return values inside the test function itself:
class MockConnecton:
    def __init__(self):
        self._ch = Mock()

    def channel(self):
        return self._ch

class QEmiterTest(unittest.TestCase):
    @patch('task_queues.queue.pika.BlockingConnection')
    @patch('task_queues.queue.pika.ConnectionParameters')
    def test_emiter(self, mock_params, mock_block):
        config = {
            'host': 'mq',
            'exchange': 'test'
        }
        params = {"FOO": "BAR"}
        mock_params.return_value = params
        conn = MockConnecton()
        mock_conn = Mock(wraps=conn)
        mock_block.return_value = mock_conn
        emitter = QEmitter(config['host'], config['exchange'])
        mock_params.assert_called_with(config['host'])
        mock_block.assert_called_with(params)
        mock_conn.channel.assert_called_with()
        conn._ch.exchange_declare.assert_called_with(exchange=config['exchange'], type='topic')
But when I try to move from this approach to a cleaner one using start/stop of the patchers, I receive an error on the assertion:
AttributeError: '_patch' object has no attribute 'assert_called_with'
I am trying to port it like this:
class QEmiterTest(unittest.TestCase):
    def setUp(self):
        mock_params = patch('task_queues.queue.pika.ConnectionParameters')
        mock_block = patch('task_queues.queue.pika.BlockingConnection')
        self.params_ret = {"FOO": "BAR"}
        mock_params.return_value = self.params_ret
        conn = MockConnecton()
        self.mock_conn = Mock(wraps=conn)
        mock_block.return_value = self.mock_conn
        self.patch_params = mock_params
        self.patch_block = mock_block
        self.patch_params.start()
        self.patch_block.start()

    def test_emiter(self):
        config = {
            'host': 'mq',
            'exchange': 'test'
        }
        emitter = QEmitter(config['host'], config['exchange'])
        self.patch_params.assert_called_with(config['host'])
        self.patch_block.assert_called_with(self.params_ret)
        self.mock_conn.channel.assert_called_with()
        self.mock_conn._ch.exchange_declare.assert_called_with(exchange=config['exchange'], type='topic')

    def tearDown(self):
        self.patch_params.stop()
        self.patch_block.stop()
I may not fully understand start and stop. I was under the assumption that setUp would apply the patches and that I could make assertions through the references I kept. I also welcome any suggestions on how to make several mocks cleaner.
A patch object is a patcher, not a mock. The mock framework has two main duties:
Mock objects, used to record calls and follow your script
patch methods and objects, used to replace a reference with something you can use to sense or simulate behavior
A lot of the time we use patch to install mocks... but a patcher is not a mock.
patch.start() returns the new reference (often a Mock) that is used in place of the original one:
def setUp(self):
    self.params_ret = {"FOO": "BAR"}
    conn = MockConnecton()
    self.mock_conn = Mock(wraps=conn)
    self.patch_params = patch('task_queues.queue.pika.ConnectionParameters', return_value=self.params_ret)
    self.patch_block = patch('task_queues.queue.pika.BlockingConnection', return_value=self.mock_conn)
    # start() returns the Mock objects that replace the originals;
    # keep these for the assertions, not the patcher objects themselves
    self.mock_params = self.patch_params.start()
    self.mock_block = self.patch_block.start()
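To round it out, a sketch of how the test and teardown could then look with the started mocks (assuming the same QEmitter and config as in the question); calling self.addCleanup(self.patch_params.stop) and self.addCleanup(self.patch_block.stop) right after start() in setUp is a tidier alternative to a tearDown:
def test_emiter(self):
    config = {'host': 'mq', 'exchange': 'test'}
    emitter = QEmitter(config['host'], config['exchange'])
    # assert on the mocks returned by start(), not on the patchers
    self.mock_params.assert_called_with(config['host'])
    self.mock_block.assert_called_with(self.params_ret)
    self.mock_conn.channel.assert_called_with()

def tearDown(self):
    self.patch_params.stop()
    self.patch_block.stop()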
I am trying to write a logging system which uses dynamic classes to make tables. Creating the classes and the tables seems to work fine, but trying to put entries into them leads to an error about mapping. Below are the sample code and the error message.
Base = declarative_base()

# my init function
def tableinit(self, keyargs):
    self.__dict__ = dict(keyargs)

# table creation
tableName = "newTable"
columnsDict["__tablename__"] = tableName
columnsDict["__init__"] = tableinit
columnsDict["id"] = Column("id", Integer, autoincrement=True, nullable=False, primary_key=True)
columnsDict["pid"] = Column("pid", Integer, ForeignKey('someparenttable.id'))  # someparenttable is created with a hard coded class
newTable = type(tableName, (Base,), columnsDict)
tableClassDict[tableName] = newTable

# when doing an entry
newClassInst = subEntryClassDict[tableName]
newEntry = newClassInst(dataDict)
entryList.append(newEntry)  # this is called in a for loop with the entries for someparenttable's entries also
self.session.add_all(entryList)  # at this point the error occurs
The error:
UnmappedInstanceError: Class 'newTable' is mapped, but this instance lacks instrumentation. This occurs when the instance is created before sqlalchemy.orm.mapper(module.newTable) was called.
This is easier if you create a function to return a class that you set up normally. I've tried something like this and it works:
def getNewTable( db, table ):
    class NewTable( Base ):
        __tablename__ = table
        __table_args__ = { 'schema': db }
        id = Column( ...
    return NewTable

newClassInst = getNewTable( 'somedb', 'sometable' )
newRow = newClassInst( data )
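For completeness, a small sketch of how the returned class could then be used (engine, Session and the remaining column definitions are assumed to be set up elsewhere); note that a class declared this way keeps the default declarative constructor, which takes keyword arguments:
SomeLog = getNewTable('somedb', 'sometable')   # the class is mapped as soon as it is created
Base.metadata.create_all(engine)               # create the table if it does not exist yet
row = SomeLog(**data)                          # default declarative __init__ expects keyword args
session.add(row)
session.commit()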
This problem is caused by the instance lacking the ORM's instrumentation, as the error description says. I think it is actually caused by self.__dict__ = dict(keyargs).
So it can be solved by rewriting the __init__ so that it does not wipe out what the ORM has injected.
Turn this
# my init function
def tableinit(self, keyargs):
    self.__dict__ = dict(keyargs)
To
# my init function
def tableinit(self, **kwargs):
    self.__dict__.update(kwargs)
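The reason, as an aside: SQLAlchemy stores its instrumentation in the instance's __dict__ (under the key _sa_instance_state) before your __init__ runs, so rebinding __dict__ throws that state away, while update() leaves it in place. A tiny illustration:
row = newTable(id=1)                      # the dynamically created class from the question
'_sa_instance_state' in row.__dict__      # True with the update()-based __init__;
                                          # with the old __init__ it was lost, hence UnmappedInstanceError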
I'm trying to make some generic apps using SQLAlchemy, such as tags or ratings for any model, but I couldn't find any help in the docs. I really liked what I could do with the Django contenttypes framework. Is there any similar functionality in SQLAlchemy?
I once wrote some example code about something similar to this (see http://taketwoprogramming.blogspot.com/2009/08/reusable-sqlalchemy-models.html).
The basic idea is that you can create a model like this:
@commentable
class Post(Base):
    __tablename__ = 'posts'
    id = sa.Column(sa.Integer, primary_key=True)
    text = sa.Column(sa.String)
...where commentable is defined like this...
class BaseComment(object):
    pass

def build_comment_model(clazz):
    class_table_name = str(class_mapper(clazz).local_table)
    metadata = clazz.metadata
    comment_class_name = clazz.__name__ + 'Comment'
    comment_class = type(comment_class_name, (BaseComment,), {})
    comment_table_name = class_table_name + '_comments'
    comment_table = sa.Table(comment_table_name, metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column(class_table_name + '_id',
                  sa.Integer,
                  sa.ForeignKey(class_table_name + '.id')),
        sa.Column('text', sa.String),
        sa.Column('name', sa.String(100)),
        sa.Column('url', sa.String(255)),
    )
    mapper(comment_class, comment_table)
    return comment_class, comment_table

def commentable(clazz):
    comment_class, comment_table = build_comment_model(clazz)
    clazz.Comment = comment_class
    setattr(clazz, 'comments', relation(comment_class))

    def add_comment(self, comment):
        self.comments.append(comment)

    setattr(clazz, 'add_comment', add_comment)
    return clazz
Basically, the commentable decorator dynamically creates a new type and table and adds some helper methods to the decorated class. This is the test I used to verify that the code works, which also shows an example of how it would be used...
class TestModels(SATestCase):
    def test_make_comment(self):
        p = Post()
        p.text = 'SQLAlchemy is amazing!'

        text = 'I agree!'
        name = 'Mark'
        url = 'http://www.sqlalchemy.org/'

        c = Post.Comment()
        c.text = text
        c.name = name
        c.url = url

        p.add_comment(c)
        Session.add(p)

        # This is a method I use to force the reload of the objects from
        # the database to make sure that when I test them, I'm actually
        # pulling from the database rather than just getting the data
        # of the object still in the session.
        p = self.reload(p)

        self.assertEquals(len(p.comments), 1)

        c = p.comments[0]
        self.assertEquals(c.text, text)
        self.assertEquals(c.name, name)
        self.assertEquals(c.url, url)
I wrote this a while ago, and I don't think there's anything in SQLAlchemy that will do this kind of thing for you, but you can create something similar without too much trouble. In my example, I create new mapped classes and helper methods on the fly in a class decorator.
I never really made much use of it, but it might give you some ideas.