Query different collections by a variable in mongoengine and Django - python

Is it possible to use a variable as part of a collection name and query different collections based on that name in mongoengine?
For example:
There are 3 collections in my MongoDB:
collection_first
collection_second
collection_third
and I execute a simple for-loop like:
collection_names = ['first', 'second', 'third']
for name in collection_names:
    # Query the collection named 'collection_' + name here
By the way, I am using mongoengine in Django; how should models.py be set up for this kind of scenario?
class Testing(DynamicDocument):
    # The collection name should be dynamic, shouldn't it?
    meta = {'collection': 'collection_name'}
    user_name = StringField(db_field='user_name')
Thank you very much.
Update with the solution.
Define the model in models.py without meta:
class Testing(DynamicDocument):
    # Do NOT use meta to point to a specific collection.
    user_name = StringField(db_field='user_name')
When you call the function, use switch_collection to switch to the real collection:
from mongoengine.context_managers import switch_collection

def search_in_testing(name, **kwargs):
    with switch_collection(Testing, 'collection_%s' % name):
        search_results = Testing.objects(**kwargs)
        return search_results
In your code, just call the function in a for loop:
collection_names = ['first', 'second', 'third']
for name in collection_names:
    search_results = search_in_testing(name, name=name)
Reference: switch_collection in mongoengine
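The same context manager also works for writes, in case documents need to be inserted into one of the per-suffix collections. A minimal sketch, where the user_name values are just example data, not taken from the question:
from mongoengine.context_managers import switch_collection

def save_in_testing(name, **field_values):
    # Write into collection_<name> instead of Testing's default collection.
    with switch_collection(Testing, 'collection_%s' % name):
        Testing(**field_values).save()

for name in ['first', 'second', 'third']:
    save_in_testing(name, user_name='user_of_%s' % name)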

Perhaps the following test in this commit would be of help in some way:
def test_dynamic_collection_naming(self):
    def create_collection_name(cls):
        return "PERSON"

    class DynamicPerson(Document):
        name = StringField()
        age = IntField()
        meta = {'collection': create_collection_name}

    collection = DynamicPerson._get_collection_name()
    self.assertEquals(collection, 'PERSON')

    DynamicPerson(name='Test User', age=30).save()
    self.assertTrue(collection in self.db.collection_names())

Yes, you can do it like this. As an example:
for name in collection_names:
    for doc in db['collection_' + name].find():
        print(doc)
Here db is the pymongo Database object.
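For completeness, a minimal sketch of how db could be obtained with pymongo; the host, port, and database name are placeholder assumptions, not from the answer:
from pymongo import MongoClient

client = MongoClient('localhost', 27017)   # placeholder connection settings
db = client['mydb']                        # placeholder database name

collection_names = ['first', 'second', 'third']
for name in collection_names:
    for doc in db['collection_' + name].find():
        print(doc)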

Related

Odoo: How to create many records in Transient.Model?

This code only creates one record. What is wrong?
class PartnerTagCreate(models.TransientModel):
    """ Choose tags to be added to partner."""
    _name = 'partner.tags.create'
    _description = __doc__

    market_id = fields.Many2one('partner.tags', string='Market Tag')
    application_id = fields.Many2one('partner.tags', string='Application Tag')
    partner_id = fields.Integer()

    @api.multi
    def create_contact_tag(self):
        for record in self.env['sale.order.line'].browse(self._context.get('active_ids', [])):
            vals = {}
            vals['partner_id'] = record.order_partner_id
            self.write(vals)
        return True
I need this function to create one record for each order_partner_id I selected before opening the wizard...
How to achieve that?
Here is my new code (function):
def create_contact_tag(self):
    sale_order_line_ids = self.env['sale.order.line'].browse(self._context.get('active_ids', []))
    for partner in sale_order_line_ids:
        values = {}
        values['partner_id'] = partner.order_partner_id
        self.create(values)
    return {}
This creates one record for market_id and/or application_id and dedicated records for each partner_id in the selection.
You use the 'create' method to create new records; this is the same for TransientModel as for the persistent Model.
So, replace self.write(vals) with self.create(vals) and you should be fine.
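For reference, a minimal sketch of what the wizard method could look like with create(); copying market_id/application_id onto each new record and using .id for the Integer field are my assumptions, not taken from the question:
@api.multi
def create_contact_tag(self):
    lines = self.env['sale.order.line'].browse(self._context.get('active_ids', []))
    for line in lines:
        # One new record per selected order line.
        # partner_id is an Integer field, so store the id, not the recordset.
        self.create({
            'partner_id': line.order_partner_id.id,
            'market_id': self.market_id.id,            # assumption: carry the chosen tags over
            'application_id': self.application_id.id,
        })
    return True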

Can I lookup a related field using a Q object in the Django ORM?

In Django, can I re-use an existing Q object on multiple models, without writing the same filters twice?
I was thinking about something along the lines of the pseudo-Django code below, but did not find anything relevant in the documentation:
class Author(Model):
    name = TextField()
    company_name = TextField()

class Book(Model):
    author = ForeignKey(Author)

# Create a Q object for the Author model
q_author = Q(company_name="Books & co.")

# Use it to retrieve Book objects
qs = Book.objects.filter(author__matches=q_author)
If that is not possible, can I extend an existing Q object to work on a related field? Pseudo-example:
# q_book == Q(author__company_name="Books & co.")
q_book = q_author.extend("author")
# Use it to retrieve Book objects
qs = Book.objects.filter(q_book)
The only thing I've found that comes close is using a subquery, which is a bit unwieldy:
qs = Book.objects.filter(author__in=Author.objects.filter(q_author))
From what I can tell from your comment, it looks like you're just trying to pass a set of common arguments to multiple filters; to do that, you can unpack a dictionary.
The values in the dictionary can still be Q objects if required, just as if they were values you would normally pass to the filter arguments.
args = { 'author__company_name': "Books & co" }
qs = Book.objects.filter(**args)
args['author__name'] = 'Foo'
qs = Book.objects.filter(**args)
To share this between different models, you'd have to do some dictionary mangling:
# Note: str.lstrip() strips characters rather than a prefix, so remove the prefix explicitly.
author_args = {k.replace('author__', '', 1): v for k, v in args.items()}
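A short usage sketch putting the two dictionaries side by side; the field names come from the question, and the 'Foo' value is just an example:
args = {'author__company_name': "Books & co", 'author__name': 'Foo'}

# Filter books through the relation...
books = Book.objects.filter(**args)

# ...and reuse the same criteria directly on Author by stripping the prefix.
author_args = {k.replace('author__', '', 1): v for k, v in args.items()}
authors = Author.objects.filter(**author_args)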
You can do this:
books = Book.objects.filter(author__company_name="Books & co")

How to save data as BlobProperty instead of multiple ListProperties?

Currently I have the following code:
class User(db.Model):
    field_names = db.StringListProperty(indexed=False)
    field_values = db.StringListProperty(indexed=False)
    field_scores = db.ListProperty(int, indexed=False)

def fields_add(user_key_name, field_name, field_value, field_score):
    user = User.get(user_key_name)
    if user:
        try:
            field_index = user.field_names.index(field_name)  # (1)
            user.field_values[field_index] = field_value
            user.field_scores[field_index] = field_score
        except ValueError:
            # field wasn't added to the list before
            user.field_names.append(field_name)
            user.field_values.append(field_value)
            user.field_scores.append(field_score)
        user.put()
It works well, but I would like to optimize it: serialize field_name, field_value and field_score and store them in one BlobProperty:
class User(db.Model):
    fields = db.ListProperty(indexed=False)

f = {
    'f': field_name,
    'v': field_value,
    's': field_score,
}
user.fields = simplejson.dumps(f)
But what should code (1) look like with such an approach? How do I find the record to update?
If user.fields is a list of dicts where 'f' is the field name, this is one possible answer to your immediate question:
field_index = [field['f'] for field in user.fields].index(field_name)
It's not immediately clear why your revision is more optimal in your case, but I'll take your word for it. :)
You can serialize objects with json or pickle.
So, for instance, if your model holds a property udata = db.BlobProperty(), then serialize an object like udata = pickle.dumps(obj).
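A minimal sketch of that idea applied to the question, assuming the fields are stored as a single pickled list of dicts in a BlobProperty; the property name fields_blob and the use of get_by_key_name are my assumptions:
import pickle
from google.appengine.ext import db

class User(db.Model):
    fields_blob = db.BlobProperty()  # hypothetical property name

def fields_add(user_key_name, field_name, field_value, field_score):
    user = User.get_by_key_name(user_key_name)
    if user:
        fields = pickle.loads(user.fields_blob) if user.fields_blob else []
        try:
            # the equivalent of code (1): look up the entry by its field name
            idx = [f['f'] for f in fields].index(field_name)
            fields[idx]['v'] = field_value
            fields[idx]['s'] = field_score
        except ValueError:
            # field wasn't stored before
            fields.append({'f': field_name, 'v': field_value, 's': field_score})
        user.fields_blob = db.Blob(pickle.dumps(fields))
        user.put()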

Generic relation with sqlalchemy as in django contenttypes

I'm trying to make some generic apps using SQLAlchemy, such as tags or ratings for any model, but I couldn't find any help in the docs. I really liked what I could do with the Django contenttypes framework. Is there any similar functionality in SQLAlchemy?
I once wrote some example code about something similar to this (see http://taketwoprogramming.blogspot.com/2009/08/reusable-sqlalchemy-models.html).
The basic idea is that you can create a model like this:
@commentable
class Post(Base):
    __tablename__ = 'posts'

    id = sa.Column(sa.Integer, primary_key=True)
    text = sa.Column(sa.String)
...where commentable is defined like this...
class BaseComment(object):
    pass

def build_comment_model(clazz):
    class_table_name = str(class_mapper(clazz).local_table)
    metadata = clazz.metadata
    comment_class_name = clazz.__name__ + 'Comment'
    comment_class = type(comment_class_name, (BaseComment,), {})
    comment_table_name = class_table_name + '_comments'
    comment_table = sa.Table(comment_table_name, metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column(class_table_name + '_id',
                  sa.Integer,
                  sa.ForeignKey(class_table_name + '.id')),
        sa.Column('text', sa.String),
        sa.Column('name', sa.String(100)),
        sa.Column('url', sa.String(255)),
    )
    mapper(comment_class, comment_table)
    return comment_class, comment_table

def commentable(clazz):
    comment_class, comment_table = build_comment_model(clazz)
    clazz.Comment = comment_class
    setattr(clazz, 'comments', relation(comment_class))

    def add_comment(self, comment):
        self.comments.append(comment)

    setattr(clazz, 'add_comment', add_comment)
    return clazz
Basically, the commentable decorator dynamically creates a new type and table, along with some helper methods to the decorated class. This is the test I used to test that the code works, which shows some example of how it would work...
class TestModels(SATestCase):
    def test_make_comment(self):
        p = Post()
        p.text = 'SQLAlchemy is amazing!'

        text = 'I agree!'
        name = 'Mark'
        url = 'http://www.sqlalchemy.org/'

        c = Post.Comment()
        c.text = text
        c.name = name
        c.url = url
        p.add_comment(c)
        Session.add(p)

        # This is a method I use to force the reload of the objects from
        # the database to make sure that when I test them, I'm actually
        # pulling from the database rather than just getting the data
        # of the object still in the session.
        p = self.reload(p)

        self.assertEquals(len(p.comments), 1)
        c = p.comments[0]
        self.assertEquals(c.text, text)
        self.assertEquals(c.name, name)
        self.assertEquals(c.url, url)
I wrote this a while ago, and I don't think there's anything in SQLAlchemy that will do this kind of thing for you, but you can create something similar without too much trouble. In my example, I create the new mapped class, table, and helper methods on the fly in a class decorator.
I never really made much use out of it, but it might give you some ideas.

SQLAlchemy - MappedCollection problem

I have some problems with setting up the dictionary collection in Python's SQLAlchemy:
I am using declarative definition of tables. I have an Item table in a 1:N relation with a Record table. I set up the relation using the following code:
_Base = declarative_base()

class Record(_Base):
    __tablename__ = 'records'

    item_id = Column(String(M_ITEM_ID), ForeignKey('items.id'))
    id = Column(String(M_RECORD_ID), primary_key=True)
    uri = Column(String(M_RECORD_URI))
    name = Column(String(M_RECORD_NAME))

class Item(_Base):
    __tablename__ = 'items'

    id = Column(String(M_ITEM_ID), primary_key=True)
    records = relation(Record,
                       collection_class=column_mapped_collection(Record.name),
                       backref='item')
Now I want to work with the Items and Records. Let's create some objects:
i1 = Item(id='id1')
r = Record(id='mujrecord')
And now I want to associate these objects using the following code:
i1.records['source_wav'] = r
but the Record r doesn't have the name attribute set (the dictionary key column). Is there any way to ensure this automatically? (I know that setting the name when creating the Record works, but that doesn't feel right to me.)
Many thanks
You want something like this:
from sqlalchemy.orm import validates

class Item(_Base):
    [...]

    @validates('records')
    def validate_record(self, key, record):
        assert record.name is not None, "Record fails validation, must have a name"
        return record
With this, you get the desired validation:
>>> i1 = Item(id='id1')
>>> r = Record(id='mujrecord')
>>> i1.records['source_wav'] = r
Traceback (most recent call last):
[...]
AssertionError: Record fails validation, must have a name
>>> r.name = 'foo'
>>> i1.records['source_wav'] = r
>>>
I can't comment yet, so I'm just going to write this as a separate answer:
from sqlalchemy.orm import validates

class Item(_Base):
    [...]

    @validates('records')
    def validate_record(self, key, record):
        record.name = key
        return record
This is basically a copy of Gunnlaugur's answer but abusing the validates decorator to do something more useful than exploding.
You have backref='item'. Is this a typo for backref='name'?
