I'm using python-storm as my ORM. The many-to-many reference set is giving me headaches :(
These are the relevant objects:
class Author(object):
    __storm_table__ = "author"
    id = Int(primary=True)
    name = Unicode()
    institution_id = Int()
    institution = Reference(institution_id, Institution.id)

    def __init__(self, name):
        self.name = name

class Paper(object):
    __storm_table__ = "paper"
    id = Int(primary=True)
    name = Unicode()
    conference_id = Int()
    conference = Reference(conference_id, Conference.id)

    def __init__(self, name):
        self.name = name

class AuthorPapers(object):
    __storm_table__ = "authorpapers"
    __storm_primary__ = "author_id", "paper_id"
    author_id = Int()
    paper_id = Int()
The respective SQLite tables look like this:
store.execute("CREATE TABLE if not exists author (id INTEGER PRIMARY KEY, name VARCHAR, institution_id INTEGER, FOREIGN KEY (institution_id) REFERENCES institution(id))")
store.execute("CREATE TABLE if not exists paper (id INTEGER PRIMARY KEY, name VARCHAR, conference_id INTEGER, FOREIGN KEY (conference_id) REFERENCES conference(id))")
store.execute("CREATE TABLE if not exists authorpapers (author_id INTEGER, paper_id INTEGER, PRIMARY KEY (author_id, paper_id))")
Now say I have two authors who collaborated on a paper
a = Author(u"Steve Rogers")
b = Author(u"Captain America")
and a paper
p6 = Paper(u"Bunga Bunga")
So now I want to associate both authors with the paper using
Author.papers = ReferenceSet(Author.id, AuthorPapers.author_id, Paper.id, AuthorPapers.paper_id)
and doing this
a.papers.add(p6)
b.papers.add(p6)
This is, by the way, how the Storm tutorial says it is supposed to work... but I get
File "/usr/lib64/python2.7/site-packages/storm/references.py", line 376, in add
self._relation2.link(remote, link, True)
File "/usr/lib64/python2.7/site-packages/storm/references.py", line 624, in link
pairs = zip(self._get_local_columns(local.__class__),
File "/usr/lib64/python2.7/site-packages/storm/references.py", line 870, in _get_local_columns
for prop in self.local_key)
File "/usr/lib64/python2.7/site-packages/storm/references.py", line 870, in <genexpr>
for prop in self.local_key)
File "/usr/lib64/python2.7/site-packages/storm/properties.py", line 53, in __get__
return self._get_column(cls)
File "/usr/lib64/python2.7/site-packages/storm/properties.py", line 97, in _get_column
attr = self._detect_attr_name(cls)
File "/usr/lib64/python2.7/site-packages/storm/properties.py", line 82, in _detect_attr_name
raise RuntimeError("Property used in an unknown class")
RuntimeError: Property used in an unknown class
And I'm not really able to make sense of this right now.
I'm not really familiar with Storm, but looking at the documentation example, it looks like this is just an issue with the order in which the arguments to ReferenceSet are passed. I tried to use this:
Author.papers = ReferenceSet(Author.id, AuthorPapers.author_id, AuthorPapers.paper_id, Paper.id)
instead of this:
Author.papers = ReferenceSet(Author.id, AuthorPapers.author_id, Paper.id, AuthorPapers.paper_id)
and no exception was raised.
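For completeness, here is a minimal end-to-end sketch of the corrected setup, reusing the classes and tables from above. The in-memory SQLite URI, the explicit flush and the commit are my additions for illustration, not taken from the original snippets:

from storm.locals import create_database, Store

Author.papers = ReferenceSet(Author.id, AuthorPapers.author_id,
                             AuthorPapers.paper_id, Paper.id)

database = create_database("sqlite:")   # in-memory database, just for the sketch
store = Store(database)
# ... the CREATE TABLE statements from above go here ...

a = Author(u"Steve Rogers")
b = Author(u"Captain America")
p6 = Paper(u"Bunga Bunga")
store.add(a)
store.add(b)
store.add(p6)
store.flush()      # assigns primary keys; probably not strictly required, but makes the link rows explicit

a.papers.add(p6)   # inserts (author_id, paper_id) rows into authorpapers
b.papers.add(p6)
store.commit()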
I have 2 model classes as below:
class Domain(db.Model):
    __tablename__ = 'domain'
    id = db.Column(db.Integer, primary_key=True)
    domain_name = db.Column(db.String(30), unique=True)
    mailboxes = db.Column(db.Integer, default=0)

    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

    def __repr__(self):
        return '%s' % self.domain_name

class EmailAccount(db.Model):
    __tablename__ = 'email_account'
    __table_args__ = (
        db.UniqueConstraint('username', 'domain_id',
                            name='_uq_username_domain'), {}
    )
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(30))
    domain_id = db.Column(db.Integer, db.ForeignKey('domain.id'))
    domain = db.relationship('Domain', backref=db.backref('emailaccounts',
                                                          lazy='dynamic'))

    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

    def __repr__(self):
        return '%s@%s ' % (self.username, self.domain)
I have included only the relevant attributes here in the example. I want to populate the models using a script that reads the data from a CSV file. The script for the domain table works well using Flask-SQLAlchemy, but the script for the email_account table throws an exception. The script is as follows:
#Populate domains from csv
domain_file = "domain.csv"
csv_file = csv.DictReader(open(domain_file, 'rb'), delimiter=',')
for row in csv_file:
    #data type conversion from (csv)string before inserting to table
    for key, value in row.items():
        #some code omitted
        print key, value
    domain = Domain(**row)
    db.session.add(domain)
    db.session.commit()

#Populate accounts from csv
accounts_file = "accounts.csv"
csv_file = csv.DictReader(open(accounts_file, 'rb'), delimiter=',')
for row in csv_file:
    mdomain_name = ''
    #data type conversion from (csv)string before inserting to table
    for key, value in row.items():
        print key, value
        if key == 'domain':
            mdomain = Domain.query.filter_by(domain_name=value).first()
            mdomain_name = mdomain.domain_name
            mdomain_id = mdomain.id
        if key == 'domain_id':
            value = mdomain_id
    account = EmailAccount(**row)
    db.session.add(account)
    db.session.commit()
Exception thrown is:
File "data.py", line 55, in db.session.add(account)
File ".../local/lib/python2.7/site-packages/sqlalchemy/orm/scoping.py", line 149, in do return getattr(self.registry(), name)(*args, **kwargs)
File ".../local/lib/python2.7/site-packages/sqlalchemy/orm/session.py", line 1397, in add self._save_or_update_state(state)
File ".../local/lib/python2.7/site-packages/sqlalchemy/orm/session.py", line 1415, in _save_or_update_state halt_on=self._contains_state):
File ".../local/lib/python2.7/site-packages/sqlalchemy/orm/mapper.py", line 1986, in cascade_iterator parent_dict, visited_states, halt_on))
File ".../local/lib/python2.7/site-packages/sqlalchemy/orm/properties.py", line 930, in cascade_iterator get_all_pending(state, dict_)
File ".../local/lib/python2.7/site-packages/sqlalchemy/orm/attributes.py", line 761, in get_all_pending ret = [(instance_state(current), current)] AttributeError: 'str' object has no attribute '_sa_instance_state'
Please suggest the changes needed in data.py, i.e. the script that uploads data for the EmailAccount model, which has a foreign key to the Domain class. I wish to use Flask-SQLAlchemy only.
Extract of accounts.csv file:
Email Account,legacy_username,password,full_name,quota,is_domain_admin,is_catch_all,disabled_login,disabled_delivery
info@abc.com,,,,104857600,,,,
internal@abc.com,,,Internal,102400000,,,,
kiran.rs@abc.com,,,,102400000,,,,
kishorepr,xyz.com,,,,209715200,,,,
When a row contains the domain key, you retrieve the Domain to get its id, but you don't update your row with that domain id.
Then when you do:
account = EmailAccount(**row)
the row dict still has the key domain associated with the domain name. Since your EmailAccount class uses the name domain for the relationship, SQLAlchemy expects a Domain object there when in fact it's getting a string (the name). That is why you get the error AttributeError: 'str' object has no attribute '_sa_instance_state'.
Update: this should work
for row in csv_file:
    account_values = {}
    for key, value in row.items():
        if key == 'domain':
            mdomain = Domain.query.filter_by(domain_name=value).first()
            account_values['domain'] = mdomain
        else:
            account_values[key] = value
    account = EmailAccount(**account_values)
    db.session.add(account)
    db.session.commit()
I have one model (company category) populated by a table - simple names etc. I then have a company model and I'd like to link these two together such that I have the categories in a populated drop-down box.
class CompanyCategory(db.Model):
    categoryname = db.StringProperty(required=True)

class Company(db.Model):
    companyurl = db.StringProperty(required=False)
    companycategory = db.ReferenceProperty(CompanyCategory, collection_name='category')
However when I do this I get the following error:
<class 'google.appengine.ext.db.DuplicatePropertyError'>: Class CompanyCategory already has property categoryname
Traceback (most recent call last):
  File "/base/data/home/apps/XXX/1.358759458095086806/showcompanies.py", line 52, in <module>
    class Company(db.Model):
  File "/base/python_runtime/python_lib/versions/1/google/appengine/ext/db/__init__.py", line 514, in __init__
    _initialize_properties(cls, name, bases, dct)
  File "/base/python_runtime/python_lib/versions/1/google/appengine/ext/db/__init__.py", line 429, in _initialize_properties
    attr.__property_config__(model_class, attr_name)
  File "/base/python_runtime/python_lib/versions/1/google/appengine/ext/db/__init__.py", line 3656, in __property_config__
    self.collection_name))
How do I make this work? I have looked at some examples, but I'm not able to make them work in this case. How can I make companycategory a list?
Here is what is in the table:
CompanyCategory Entities
ID/Name categoryname
id=96001 ss
Here the model definition is the other way around: I want each company to have a category which is populated from a table. With the following definition I can get something that resembles what I am looking for:
class CompanyCategory(db.Model):
    categoryname = db.StringProperty(required=False)

    def __unicode__(self):
        return u'%s' % (self.categoryname)

class Company(db.Model):
    companyurl = db.StringProperty(required=False)
    companyname = db.StringProperty(required=False)
    companydesc = db.TextProperty(required=False)
    companyaddress = db.PostalAddressProperty(required=False)
    companypostcode = db.StringProperty(required=False)
    companyemail = db.EmailProperty(required=False)
    companycountry = db.StringProperty(required=False)
    companyvalid = db.BooleanProperty()
    companyentrytime = db.DateTimeProperty(auto_now_add=True)
    companylatlong = db.GeoPtProperty()
    companycategory = db.ReferenceProperty(CompanyCategory)
I now get a drop-down box when I go to add a company, populated from the values in the table. However, the value selected in the drop-down is not being stored in the datastore upon submission. How should this be done?
Pasting your code into shell.appspot.com works just fine - so whatever the cause of your problem, your sample code is not representative of it. As Christopher Ramirez suggests, you're probably declaring companycategory twice, either in the same model, or in a parent class of the model.
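If it helps, here is a minimal sketch of the two models with an explicit, non-clashing collection_name; the name 'companies' is my choice for illustration and not from the question:

from google.appengine.ext import db

class CompanyCategory(db.Model):
    categoryname = db.StringProperty(required=True)

class Company(db.Model):
    companyurl = db.StringProperty(required=False)
    companycategory = db.ReferenceProperty(CompanyCategory,
                                           collection_name='companies')

# company.companycategory dereferences to the CompanyCategory entity, and
# category.companies.fetch(20) lists the companies that reference a category.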
I am trying to create a program that loads in over 100 tables from a database so that I can change all appearances of a user's user id.
Rather than map all of the tables individually, I decided to use a loop to map each of the tables using an array of objects. This way, the table definitions can be stored in a config file and later updated.
Here is my code so far:
def init_model(engine):
    """Call me before using any of the tables or classes in the model"""
    meta.Session.configure(bind=engine)
    meta.engine = engine

class Table:
    tableID = ''
    primaryKey = ''
    pkType = sa.types.String()

    class mappedClass(object):
        pass

WIW_TBL = Table()
LOCATIONS_TBL = Table()

WIW_TBL.tableID = "wiw_tbl"
WIW_TBL.primaryKey = "PORTAL_USERID"
WIW_TBL.pkType = sa.types.String()

LOCATIONS_TBL.tableID = "locations_tbl"
LOCATIONS_TBL.primaryKey = "LOCATION_CODE"
LOCATIONS_TBL.pkType = sa.types.Integer()

tableList = [WIW_TBL, LOCATIONS_TBL]

for i in tableList:
    i.tableID = sa.Table(i.tableID.upper(), meta.metadata,
                         sa.Column(i.primaryKey, i.pkType, primary_key=True),
                         autoload=True,
                         autoload_with=engine)
    orm.mapper(i.mappedClass, i.tableID)
The error that this code returns is:
sqlalchemy.exc.ArgumentError: Class '<class 'changeofname.model.mappedClass'>' already has a primary mapper defined. Use non_primary=True to create a non primary Mapper. clear_mappers() will remove *all* current mappers from all classes.
I can't use clear_mappers as it wipes all of the classes, and the entity_name scheme doesn't seem to apply here.
It seems that every object wants to use the same class, although they all should have their own instance of it.
Does anyone have any ideas?
Well, in your case it *is* the same class that you try to map to different Tables. To solve this, create a class dynamically for each Table:
class Table(object):
    tableID = ''
    primaryKey = ''
    pkType = sa.types.String()

    def __init__(self):
        self.mappedClass = type('TempClass', (object,), {})
But I would prefer a slightly cleaner version:
class Table2(object):
    def __init__(self, table_id, pk_name, pk_type):
        self.tableID = table_id
        self.primaryKey = pk_name
        self.pkType = pk_type
        self.mappedClass = type('Class_' + self.tableID, (object,), {})

# ...

WIW_TBL = Table2("wiw_tbl", "PORTAL_USERID", sa.types.String())
LOCATIONS_TBL = Table2("locations_tbl", "LOCATION_CODE", sa.types.Integer())
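Putting it together, the reflection loop from the question can then stay essentially as it was; a sketch assuming the same sa, orm, meta and engine objects as above:

tableList = [WIW_TBL, LOCATIONS_TBL]

for i in tableList:
    # reflect the table, overriding only the primary key column
    i.tableID = sa.Table(i.tableID.upper(), meta.metadata,
                         sa.Column(i.primaryKey, i.pkType, primary_key=True),
                         autoload=True,
                         autoload_with=engine)
    # each Table2 instance now carries its own dynamically created class
    orm.mapper(i.mappedClass, i.tableID)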
item = Table('Item', metadata, autoload=True, autoload_with=engine, encoding='cp1257')

class Item(object):
    pass

from sqlalchemy.orm import mapper
mapper(Item, item)
I get this error:
line 43, in <module>
    mapper(Item, item)
  File "C:\Python27\lib\site-packages\sqlalchemy\orm\__init__.py", line 890, in mapper
    return Mapper(class_, local_table, *args, **params)
  File "C:\Python27\lib\site-packages\sqlalchemy\orm\mapper.py", line 211, in __init__
    self._configure_properties()
  File "C:\Python27\lib\site-packages\sqlalchemy\orm\mapper.py", line 578, in _configure_properties
    setparent=True)
  File "C:\Python27\lib\site-packages\sqlalchemy\orm\mapper.py", line 618, in _configure_property
    self._log("_configure_property(%s, %s)", key, prop.__class__.__name__)
  File "C:\Python27\lib\site-packages\sqlalchemy\orm\mapper.py", line 877, in _log
    (self.non_primary and "|non-primary" or "") + ") " +
  File "C:\Python27\lib\site-packages\sqlalchemy\util.py", line 1510, in __get__
    obj.__dict__[self.__name__] = result = self.fget(obj)
  File "C:\Python27\lib\site-packages\sqlalchemy\sql\expression.py", line 3544, in description
    return self.name.encode('ascii', 'backslashreplace')
UnicodeDecodeError: 'ascii' codec can't decode byte 0xeb in position 7: ordinal not in range(128)
I am connecting to MSSQL. The table autoload seems to work; I only get this error while trying to map.
Thank you all for your help!
Mapping the table to a class creates mapped properties on the class. The properties have the same names as the columns by default. Since Python 2.x only allows ASCII identifiers, that fails if you have non-ASCII column names.
The only solution I can think of is to give the properties different names when mapping the table to a class.
The example below does that. Note that I'm creating the table in the code for simplicity, so anyone can run it without an existing table. But you could do the same with a reflected table.
#-*- coding:utf-8 -*-
import sqlalchemy as sa
import sqlalchemy.orm

engine = sa.create_engine('sqlite://', echo=True)  # new memory-only database
metadata = sa.MetaData(bind=engine)

# create a table. This could be reflected from the database instead:
tb = sa.Table('foo', metadata,
    sa.Column(u'id', sa.Integer, primary_key=True),
    sa.Column(u'nomé', sa.Unicode(100)),
    sa.Column(u'ãéìöû', sa.Unicode(100))
)
tb.create()

class Foo(object):
    pass

# maps the table to the class, defining different property names
# for some columns:
sa.orm.mapper(Foo, tb, properties={
    'nome': tb.c[u'nomé'],
    'aeiou': tb.c[u'ãéìöû']
})
After that you can use Foo.nome to refer to the nomé column and Foo.aeiou to refer to the ãéìöû column.
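A quick usage sketch under those definitions (the session setup is my addition):

Session = sa.orm.sessionmaker(bind=engine)
session = Session()

f = Foo()
f.nome = u'test'      # writes to the "nomé" column
f.aeiou = u'vowels'   # writes to the "ãéìöû" column
session.add(f)
session.commit()

print session.query(Foo).filter(Foo.nome == u'test').count()   # 1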
I faced the same problem and finally managed to solve it by replacing table['column'].key after autoloading the table. Just make all your table classes inherit from the one below, then modify the column-name replacement in the mapTo method, or override the desired names manually with a dictionary via the columns_descriptors method. I don't know if this is the right way to do it, but after searching for hours it is the best approach I've got.
class SageProxy(object):

    @classmethod
    def ismapped(cls, table_name=None):
        if mappings:
            if table_name:
                if mappings.has_key(table_name):
                    tmap = mappings[table_name]
                    if tmap.has_key('class'):
                        tclass = tmap['class']
                        if tclass is cls:
                            return True
            else:
                for m in mappings.values():
                    if cls is m['class']:
                        return True
        return False

    @classmethod
    def mappingprops(cls):
        # override this to pass properties to the sqlalchemy mapper function
        return None

    @classmethod
    def columns_descriptors(cls):
        # override this to map columns to different class property names;
        # return a dictionary where the key is the column name and the value is the desired property name
        return {}

    @classmethod
    def mapTo(cls, table_name, map_opts=None):
        if not cls.ismapped(table_name):
            tab_obj = Table(table_name, sage_md, autoload=True)
            for c in tab_obj.c:
                # clean field names
                tab_obj.c[c.name].key = c.key.replace(u'%', u'Porcentaje').replace(u'ñ', u'ny').replace(u'Ñ', u'NY').replace(u'-', u'_')
            for k, v in cls.columns_descriptors().items():
                if tab_obj.c[k]:
                    tab_obj.c[k].key = v
            mapper(cls, tab_obj, properties=cls.mappingprops())
            mappings[table_name] = {'table': tab_obj, 'class': cls}
        return cls
I hope it will be useful.
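For what it's worth, a rough usage sketch under the assumptions the snippet implies (mappings as a module-level dict, sage_md as a bound MetaData; the connection string, table name and column name here are hypothetical):

from sqlalchemy import MetaData, Table, create_engine
from sqlalchemy.orm import mapper

engine = create_engine('mssql+pyodbc://user:password@sage_dsn')   # hypothetical DSN
sage_md = MetaData(bind=engine)
mappings = {}

class Clientes(SageProxy):
    @classmethod
    def columns_descriptors(cls):
        # hypothetical rename: expose the "CodigoCliente" column as "codigo"
        return {u'CodigoCliente': u'codigo'}

Clientes.mapTo(u'CLIENTES')   # reflects the table, cleans the column names and maps the class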
I found that I could do this with a simple addition to my reflected class:
metadata = MetaData(bind=engine, reflect=True)
sm = sessionmaker(bind=engine)

class tblOrders(Base):
    __table__ = metadata.tables['tblOrders']

    meter = __table__.c['Meter#']
meter is now mapped to the underlying Meter# column, which allows this code to work:
currOrder = tblOrders()
currOrder.meter = '5'
Without the mapping, Python cannot reference the column: in currOrder.Meter# the # starts a comment, and Meter on its own does not exist in the object.
Consider this simple table definition (using SQLAlchemy-0.5.6)
from sqlalchemy import *
db = create_engine('sqlite:///tutorial.db')
db.echo = False  # Try changing this to True and see what happens

metadata = MetaData(db)
user = Table('user', metadata,
    Column('user_id', Integer, primary_key=True),
    Column('name', String(40)),
    Column('age', Integer),
    Column('password', String),
)

from sqlalchemy.ext.declarative import declarative_base

class User(declarative_base()):
    __tablename__ = 'user'
    user_id = Column('user_id', Integer, primary_key=True)
    name = Column('name', String(40))
I want to know the maximum length of the name column, e.g. from the user table and from the User declarative class:
print user.name.length
print User.name.length
I have tried User.name.type.length, but it throws an exception:
Traceback (most recent call last):
  File "del.py", line 25, in <module>
    print User.name.type.length
  File "/usr/lib/python2.5/site-packages/SQLAlchemy-0.5.6-py2.5.egg/sqlalchemy/orm/attributes.py", line 135, in __getattr__
    key)
AttributeError: Neither 'InstrumentedAttribute' object nor 'Comparator' object has an attribute 'type'
User.name.property.columns[0].type.length
Note that SQLAlchemy supports composite properties, which is why columns is a list. It has a single item for simple column properties.
This should work (tested on my machine):
print user.columns.name.type.length
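Both answers can be sanity-checked against the definitions in the question; a quick sketch (the expected output is 40 in each case, the length given to String):

# Table object from the question
print user.columns.name.type.length               # 40
print user.c.name.type.length                     # the same column, shorter spelling

# declarative class from the question
print User.name.property.columns[0].type.length   # 40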
I was getting errors when fields were too big, so I wrote a generic function to trim any string down, accounting for words with spaces. It leaves words intact and trims the string so it can be inserted. I have included my ORM model for reference.
import logging

logger = logging.getLogger(__name__)

# Base, Column, Integer, String come from the usual SQLAlchemy declarative setup
class ProductIdentifierTypes(Base):
    __tablename__ = 'prod_id_type'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(length=20))
    description = Column(String(length=100))


def trim_for_insert(field_obj, in_str) -> str:
    """Trim in_str so it fits the length of the column behind field_obj, breaking on spaces."""
    max_len = field_obj.property.columns[0].type.length
    if len(in_str) <= max_len:
        return in_str
    logger.debug(f'Trimming {field_obj} to {max_len} max length.')
    trim_str = in_str[:(max_len - 1)]
    if ' ' in trim_str[:int(max_len * 0.9)]:
        return ' '.join(trim_str.split(' ')[:-1])
    return trim_str


def foo_bar():
    from models.deals import ProductIdentifierTypes, ProductName

    _str = "Foo is a 42 year old big brown dog that all the kids call bar."
    print(_str)
    print(trim_for_insert(ProductIdentifierTypes.name, _str))

    _str = "Full circle from the tomb of the womb to the womb of the tomb we come, an ambiguous, enigmatical incursion into a world of solid matter that is soon to melt from us like the substance of a dream."
    print(_str)
    print(trim_for_insert(ProductIdentifierTypes.description, _str))
If you have access to the class:
TableClass.column_name.type.length
If you have access to an instance, you can access the class via the __class__ dunder attribute.
table_instance.__class__.column_name.type.length
So in your case:
# Via Instance
user.__class__.name.type.length
# Via Class
User.name.type.length
My use case is similar to @Gregg Williamson's.
However, I implemented it differently:
def __setattr__(self, attr, value):
    # look up the mapped attribute on the class and trim the value to its column length, if any
    column_type = getattr(getattr(self.__class__, attr, None), "type", None)
    if length := getattr(column_type, "length", 0):
        value = value[:length]
    super().__setattr__(attr, value)
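To put that override in context, here is a self-contained sketch of how it might sit on a declarative model; the mixin name, model and column sizes are mine, for illustration only, and the column lookup follows the .property.columns[0] path from the accepted answer. Assigning an over-long string then silently truncates it to the column length:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class TruncatingMixin(object):
    """Trim string values to the mapped column's length on assignment."""

    def __setattr__(self, attr, value):
        # look up the mapped column for this attribute, if there is one
        prop = getattr(getattr(type(self), attr, None), "property", None)
        columns = getattr(prop, "columns", None)
        if columns is not None and isinstance(value, str):
            length = getattr(columns[0].type, "length", None)
            if length:
                value = value[:length]
        super().__setattr__(attr, value)

class Note(TruncatingMixin, Base):
    __tablename__ = 'note'
    id = Column(Integer, primary_key=True)
    title = Column(String(10))

engine = create_engine('sqlite://')              # throwaway in-memory database
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

note = Note(title='this title is far too long')
print(note.title)                                # 'this title' -- trimmed to 10 characters
session.add(note)
session.commit()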