Using factory_boy with SQLAlchemy and class methods - python

I am working on a Pyramid app with SQLAlchemy as the ORM. I am trying to test a model with a class method:
# this is essentially a global used by all the models
Session = scoped_session(sessionmaker(autocommit=False))

class Role(Base):
    __tablename__ = 'role'

    id = sa.Column(sa.types.Integer, primary_key=True)
    name = sa.Column(sa.types.Text, unique=True, nullable=False)

    def __init__(self, **kwargs):
        super(Role, self).__init__(**kwargs)

    @classmethod
    def find_all(self):
        return Session.query(Role).order_by(Role.name).all()
I am using factory_boy to test and here is how I am trying to set up my testing factory:
import factory
from factory.alchemy import SQLAlchemyModelFactory
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

from zk.model.meta import Base
from zk.model.role import Role

session = scoped_session(sessionmaker())
engine = create_engine('sqlite://')
session.configure(bind=engine)
Base.metadata.create_all(engine)

class RoleFactory(SQLAlchemyModelFactory):
    FACTORY_FOR = Role
    FACTORY_SESSION = session
However when I try to call RoleFactory.find_all() in a test, I get an error: E UnboundExecutionError: Could not locate a bind configured on mapper Mapper|Role|role, SQL expression or this Session
I tried monkeypatching meta and replacing that global Session with my session, but then I get this error: E AttributeError: type object 'RoleFactory' has no attribute 'find_all'
I tried calling RoleFactory.FACTORY_FOR.find_all() but then I get the same UnboundExecutionError.
Do I need to do something else for factory_boy to know about the class method?

This might be too obvious, but it seems that what you've got is a RoleFactory, when what you need is a Role instance. The factory wouldn't have access to any classmethods, since it's not a child of the class. Try doing this and seeing what happens:
role = RoleFactory.build()
roles = role.find_all()
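The UnboundExecutionError itself comes from Role.find_all() querying the models' global Session, which was never bound to the test engine. One way to make both the factory and the classmethod work is to reuse that global Session in the test setup instead of creating a second scoped session. A minimal sketch, assuming the global Session is importable from zk.model.meta (that import path is a guess) and an old-style FACTORY_FOR factory:

# sketch: bind the models' global Session to the test engine so find_all() works
import factory
from factory.alchemy import SQLAlchemyModelFactory
from sqlalchemy import create_engine

from zk.model.meta import Base, Session   # import path is an assumption
from zk.model.role import Role

engine = create_engine('sqlite://')
Session.configure(bind=engine)
Base.metadata.create_all(engine)

class RoleFactory(SQLAlchemyModelFactory):
    FACTORY_FOR = Role
    FACTORY_SESSION = Session

    # Role.name is NOT NULL, so give the factory a default value
    name = factory.Sequence(lambda n: 'role-{}'.format(n))

role = RoleFactory()       # created and added to Session by the factory
Session.commit()
print(Role.find_all())     # the classmethod now has a bound Session to query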

Related

How to use pytest to unit test SQLAlchemy ORM classes

I want to write some py.test code to test two simple SQLAlchemy ORM classes that were created based on this Tutorial. The problem is, how do I point py.test at a test database and roll back all changes when the tests are done? Is it possible to mock the database and run tests without actually connecting to the database?
Here is the code for my classes:
from sqlalchemy import create_engine, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import sessionmaker, relationship

eng = create_engine('mssql+pymssql://user:pass@host/my_database')

Base = declarative_base(eng)
Session = sessionmaker(eng)
intern_session = Session()

class Author(Base):
    __tablename__ = "Authors"

    AuthorId = Column(Integer, primary_key=True)
    Name = Column(String)
    Books = relationship("Book")

    def add_book(self, title):
        b = Book(Title=title, AuthorId=self.AuthorId)
        intern_session.add(b)
        intern_session.commit()

class Book(Base):
    __tablename__ = "Books"

    BookId = Column(Integer, primary_key=True)
    Title = Column(String)
    AuthorId = Column(Integer, ForeignKey("Authors.AuthorId"))
    Author = relationship("Author")
I usually do that this way:
I do not instantiate the engine and session with the model declarations; instead I only declare a Base with no bind:
Base = declarative_base()
and I only create a session when needed with:
engine = create_engine('<the db url>')
db_session = sessionmaker(bind=engine)
You can do the same by not using intern_session in your add_book method, but rather taking a session parameter:
def add_book(self, session, title):
    b = Book(Title=title, AuthorId=self.AuthorId)
    session.add(b)
    session.commit()
This makes your code more testable, since you can now pass the session of your choice when you call the method.
And you are no longer stuck with a session bound to a hardcoded database URL.
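For example, with an in-memory SQLite database standing in for the real one (illustrative; it assumes the unbound Base and the session-parameter version of add_book shown above are in scope):

# illustrative usage of the session-parameter version of add_book
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)          # create Authors/Books in the test database
session = sessionmaker(bind=engine)()

author = Author(Name='Jane Doe')
session.add(author)
session.commit()                          # author gets an AuthorId

author.add_book(session, 'A First Book')  # the session is now explicit
print(session.query(Book).count())        # 1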
I add a custom --dburl option to pytest using its pytest_addoption hook.
Simply add this to your top-level conftest.py:
def pytest_addoption(parser):
    parser.addoption('--dburl',
                     action='store',
                     default='<if needed, whatever you want>',
                     help='url of the database to use for tests')
Now you can run pytest --dburl <url of the test database>
Then I can retrieve the dburl option from the request fixture
From a custom fixture:
@pytest.fixture()
def db_url(request):
    return request.config.getoption("--dburl")
    # ...
Inside a test:
def test_something(request):
    db_url = request.config.getoption("--dburl")
    # ...
At this point you are able to:
get the test db_url in any test or fixture
use it to create an engine
create a session bound to the engine
pass the session to a tested method
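Put together by hand inside a single test, that looks roughly like this (the test body and model import path are illustrative):

# doing all four steps manually inside one test (illustrative)
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from app.models import Author, Book   # wherever your models live (path is illustrative)

def test_add_book_by_hand(request):
    db_url = request.config.getoption("--dburl")   # 1. get the test db url
    engine = create_engine(db_url)                 # 2. use it to create an engine
    session = sessionmaker(bind=engine)()          # 3. create a session bound to it
    author = Author(Name='Throwaway Author')
    session.add(author)
    session.flush()                                # assigns author.AuthorId
    author.add_book(session, 'Some Title')         # 4. pass the session to the method
    assert session.query(Book).filter_by(Title='Some Title').count() == 1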
It is quite a mess to do this in every test, so you can make good use of pytest fixtures to ease the process.
Below are some fixtures I use:
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

@pytest.fixture(scope='session')
def db_engine(request):
    """yields a SQLAlchemy engine which is disposed of after the test session"""
    db_url = request.config.getoption("--dburl")
    engine_ = create_engine(db_url, echo=True)

    yield engine_

    engine_.dispose()

@pytest.fixture(scope='session')
def db_session_factory(db_engine):
    """returns a SQLAlchemy scoped session factory"""
    return scoped_session(sessionmaker(bind=db_engine))

@pytest.fixture(scope='function')
def db_session(db_session_factory):
    """yields a SQLAlchemy session which is rolled back after the test"""
    session_ = db_session_factory()

    yield session_

    session_.rollback()
    session_.close()
Using the db_session fixture you get a fresh and clean db_session for each single test.
When the test ends, the db_session is rolled back, keeping the database clean.
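A test built on the db_session fixture then shrinks to something like this (illustrative; it assumes the session-parameter version of add_book from above and that Author/Book are importable):

# illustrative test using the db_session fixture
from app.models import Author, Book   # path is illustrative

def test_add_book(db_session):
    author = Author(Name='Fixture Author')
    db_session.add(author)
    db_session.flush()                  # assigns author.AuthorId

    author.add_book(db_session, 'Fixture-Driven Development')

    titles = [b.Title for b in db_session.query(Book).all()]
    assert 'Fixture-Driven Development' in titles

One caveat: if add_book still calls session.commit() internally, the rollback in db_session will not undo that write, so for rollback-based cleanup it is usually better to have the method only add/flush and leave committing to the caller.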

How to commit another object in a property of an object in sqlalchemy?

As the title says.
Here is the code:
from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, func, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session
from sqlalchemy import create_engine
import sqlalchemy.exc
from sqlalchemy import event

from settings import DB_HOST

def return_a_scoped_session():
    engine = create_engine(DB_HOST)
    session_factory = sessionmaker(bind=engine)
    db_session = scoped_session(session_factory)
    return db_session()

Base = declarative_base()

class MyClass(Base):
    """Doc string for MyClass"""
    __tablename__ = 'my_table'

    file_name = Column(String(512), nullable=True)

class Aria2Jobs(Base):
    __tablename__ = 'nh_downloading_jobs'

    id = Column(Integer, primary_key=True)
    file_name = Column(String(512), nullable=True)
    is_verified = Column(Boolean, default=False, nullable=True)

    def check_if_verified(self):
        if self.is_verified:
            # create an instance
            a_job = MyClass(file_name=self.file_name)
            _session = return_a_scoped_session()
            _session.add(a_job)
            _session.commit()
            _session.close()

# event
@event.listens_for(Aria2Jobs.is_verified, 'set')
def send_to_jsonpyes_jobs(target, value, oldvalue, initiator):
    target.check_if_verified()

# the error appears when I set a property of an object (this property will
# trigger a 'set' event, and the event will try to commit a session)
session = return_a_scoped_session()
row = session.query(Aria2Jobs).first()
row.is_verified = True
session.add(row)
# the error came out
#
# sqlalchemy.exc.InvalidRequestError: Object is already attached to session
session.commit()
# How to commit another object in a property of an object?
How to commit another object in a property of an object in sqlalchemy?
As you can see, when I tried session.add(row), I got this error:
sqlalchemy.exc.InvalidRequestError: Object is already attached to session
I don't know which session the row is attached to.
I want to run the function check_if_verified.
-- latest error --
sqlalchemy.exc.InvalidRequestError: Object '<Aria2Jobs at 0x7fad3a635050>' is already attached to session '1' (this is
Just remove session.add(row). Because you used session.query, the row object is already in your session.
To save the data it is enough to run the session.commit function.
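A minimal illustration of that point:

# the row returned by query() is already tracked by this session
row = session.query(Aria2Jobs).first()
row.is_verified = True   # just modify the attribute; no session.add(row) needed
session.commit()         # the pending change is flushed and committed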
You're using scoped_session incorrectly. What's happening here is each time you call return_a_scoped_session() it's returning a new session, with a completely new engine. row is being added to a different session somewhere else. (In fact, the code you posted doesn't even show the other places it's being added; I can't reproduce your error with the code you posted.) The solution is to fix your scoped_session:
engine = create_engine(DB_HOST)
Session = scoped_session(sessionmaker(bind=engine))

def return_a_scoped_session():
    return Session()
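With that in place, repeated calls hand back the same thread-local session (illustrative):

# the scoped_session registry now returns one Session per thread
s1 = return_a_scoped_session()
s2 = return_a_scoped_session()
assert s1 is s2   # one registry, one engine, one thread-local Session

Because there is now a single session per thread, the object loaded by session.query() and the one added inside check_if_verified are tracked by the same session.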

Instantiating object automatically adds to SQLAlchemy Session. Why?

From my understanding of SQLAlchemy, in order to add a model to a session, I need to call session.add(obj). However, for some reason, in my code, SQLAlchemy seems to do this automatically.
Why is it doing this, and how can I stop it? Am I approaching session in the correct way?
example
>>> from database import Session as db
>>> import clients
>>> from instances import Instance
>>> from uuid import uuid4
>>> len(db.query(Instance).all())
>>> 0 # Note, no instances in database/session
>>> i = Instance(str(uuid4()), clients.get_by_code('AAA001'), [str(uuid4())])
>>> len(db.query(Instance).all())
>>> 1 # Why?? I never called db.add(i)!
database.py
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base

import config

Base = declarative_base()

class Database():
    def __init__(self):
        db_url = 'postgresql://{:s}:{:s}@{:s}:{}/{:s}'.format(
            config.database['user'],
            config.database['password'],
            config.database['host'],
            config.database['port'],
            config.database['dbname']
        )
        self.engine = create_engine(db_url)
        session_factory = sessionmaker(bind=self.engine)
        self.session = scoped_session(session_factory)

Database = Database()
Session = Database.session
instance.py
from sqlalchemy import Column, Text, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.dialects.postgresql import UUID, ARRAY

import database

Base = database.Base

class Instance(Base):
    __tablename__ = 'instances'

    uuid = Column(UUID, primary_key=True)
    client_code = Column(
        Text, ForeignKey('clients.code', ondelete='CASCADE'), nullable=False)
    mac_addresses = Column(ARRAY(Text, as_tuple=True), primary_key=True)

    client = relationship("Client", back_populates="instances")

    def __init__(self, uuid, client, mac_addresses):
        self.uuid = uuid
        self.client = client
        self.mac_addresses = tuple(mac_addresses)
client.py
from sqlalchemy import Column, Text
from sqlalchemy.orm import relationship

import database
from database import Session as db

Base = database.Base

class Client(Base):
    __tablename__ = 'clients'

    code = Column(Text, primary_key=True)
    name = Column(Text)

    instances = relationship("Instance", back_populates='client')

    def __init__(self, code, name=None):
        self.code = code
        self.name = name

def get_by_code(code):
    client = db.query(Client).filter(Client.code == code).first()
    return client
When you create a SQLAlchemy object and link it directly to another SQLAlchemy object, both objects end up in the session.
The reason is that SQLAlchemy needs to make sure you can query these objects.
Take, for example, a user with addresses.
If you create a user in code, with an address, both the user and the address end up in the session, because the address is linked to the user and SQLAlchemy needs to make sure you can query all addresses of a user using user.addresses.all().
In that case all (possibly) existing addresses need to be fetched, as well as the new address you just added. For that purpose the newly added address needs to be saved in the database.
To prevent this from happening (for example if you only need objects to just calculate with), you can link the objects with their IDs/Foreign Keys:
address.user_id = user.user_id
However, if you do this, you won't be able to access the SQLAlchemy properties anymore. So user.addresses or address.user will no longer yield results.
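A self-contained sketch of that difference, using hypothetical User/Address models (not the models from the question):

# hypothetical User/Address models, just to show the two linking styles
from sqlalchemy import Column, Integer, Text, ForeignKey, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    addresses = relationship("Address", back_populates="user")

class Address(Base):
    __tablename__ = 'addresses'
    id = Column(Integer, primary_key=True)
    email = Column(Text)
    user_id = Column(Integer, ForeignKey('users.id'))
    user = relationship("User", back_populates="addresses")

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

user = User()
session.add(user)
session.commit()

a1 = Address(email='linked@example.com')
a1.user = user                            # relationship assignment
print(session.query(Address).count())     # 1: the query autoflushes and the
                                          # save-update cascade persists a1

a2 = Address(email='fk-only@example.com')
a2.user_id = user.id                      # FK-only assignment
print(session.query(Address).count())     # still 1: a2 is not in the session
print(a2.user)                            # None: the relationship isn't populated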
The reverse is also true; I asked a question myself a while back why linking two objects by ID will not result in SQLAlchemy linking these objects in the ORM:
relevant stackoverflow question
another description of this behavior

scoped_session object has no attribute 'create_all'

I have the following code that sets up my database with Flask-SQLAlchemy. I'm getting an exception "AttributeError: scoped_session object has no attribute 'create_all'". Can someone please explain to me why I'm getting the error and how I can fix it? :)
__init__.py:
from flask import Flask
app = Flask(__name__)
from app.db import DB
database = DB()
database.create_all()
db/__init__.py:
from flask import session
from flask.ext.sqlalchemy import SQLAlchemy

from app import app

app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://user:pass@dburl:/schema'
connection = SQLAlchemy(app)

from app.db.models import *

class DB():
    def __init__(self):
        pass

    def create_all(self):
        connection.session.create_all()
        connection.session.commit()
        print("done")
models.py:
from app.db import connection as db

class Test(db.Model):
    __tablename__ = 'Test'

    id = db.Column("ID", db.Integer, primary_key=True)
    name = db.Column("Name", db.String(100), nullable=False)

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return '<Test: %s>' % self.name
Any guidance would be greatly appreciated!
It's connection.create_all(); you added session in the middle.
Unrelated to the immediate problem, there are other things that don't look right.
You don't need to commit after running create_all.
The extension instance is usually named db. The DB class does nothing; just write your create_all function on its own.
You don't need to specify table or column names for Flask-SQLAlchemy models in most cases, and upper case names are not generally used.
Don't use tabs, PEP 8 recommends 4 spaces.
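Putting those points together, a slimmed-down db/__init__.py could look roughly like this (a sketch, not a drop-in replacement; it keeps the module paths from the question but renames connection to db, so models.py would need to import db instead):

# sketch of a trimmed app/db/__init__.py following the points above
from flask_sqlalchemy import SQLAlchemy   # flask.ext.sqlalchemy on older Flask versions

from app import app

app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://user:pass@dburl:/schema'
db = SQLAlchemy(app)            # the extension instance, conventionally named db

from app.db.models import *    # models must be imported before create_all

def create_all():
    db.create_all()            # issues the DDL directly; no commit needed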

Is it possible to unload declarative classes in SQLAlchemy?

I’m working on a library where the user shall be able to simply declare a few classes which are automatically backed by the database. In short, somewhere hidden in the code, there is
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class LibraryBase(Base):
    # important library stuff
and the user should then do
class MyStuff(LibraryBase):
    # important personal stuff

class MyStuff_2(LibraryBase):
    # important personal stuff

mystuff = MyStuff()
Library.register(mystuff)
mystuff.changeIt()     # apply some changes to the instance
Library.save(mystuff)  # and save it
# same for all other classes
In a static environment, e.g. the user has created one file with all personal classes and imports this file, this works pretty well. All class names are fixed and SQLAlchemy knows how to map each class.
In an interactive environment, things are different: Now, there is a chance of a class being defined twice. Both classes might have different modules; but still SQLAlchemy will complain:
SAWarning: The classname 'MyStuff' is already in the registry of this declarative base, mapped to <class 'OtherModule.MyStuff'>
Is there a way to deal with this? Can I somehow unload a class from its declarative_base so that I can exchange its definition with a new one?
You can use:
sqlalchemy.orm.instrumentation.unregister_class(cl)
del cl._decl_class_registry[cl.__name__]
The first line is to prevent accidental use of your unregistered class. The second removes it from the declarative registry and will prevent the warning.
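Applied to the example from the question, that could look like the following (it relies on private SQLAlchemy internals, so it is version-dependent and may not work on newer releases):

# illustrative: drop the old MyStuff so it can be redefined interactively
import sqlalchemy.orm.instrumentation

sqlalchemy.orm.instrumentation.unregister_class(MyStuff)
del MyStuff._decl_class_registry['MyStuff']

class MyStuff(LibraryBase):       # redefining no longer triggers the SAWarning
    # new personal stuff
    pass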
I'm not really sure this even works, but I think what you want is:
sqlalchemy.orm.instrumentation.unregister_class()
http://hg.sqlalchemy.org/sqlalchemy/file/762548ff8eef/lib/sqlalchemy/orm/instrumentation.py#l466
In my project I use this solution: the library-specific columns are defined in a mixin via declared_attr, and the target mapper is created by a type() call with the right bases. As a result I get a fully functional mapper.
from sqlalchemy import create_engine, BigInteger, Column
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.declarative import declared_attr

Base = declarative_base()

class LibraryBase(object):
    __tablename__ = 'model'

    @declared_attr
    def library_field(self):
        return Column(BigInteger)

class MyLibrary(object):
    @classmethod
    def register(cls, entity):
        tablename = entity.__tablename__
        Mapper = type('Entity_%s' % tablename, (Base, LibraryBase, entity), {
            '__tablename__': tablename,
            'id': Column(BigInteger, primary_key=True),
        })
        return Mapper

    @classmethod
    def setup(cls):
        Base.metadata.create_all()

class MyStaff(object):
    __tablename__ = 'sometable1'

    @declared_attr
    def staff_field(self):
        return Column(BigInteger)

    def mymethod(self):
        print('My method:', self)

class MyStaff2(MyStaff):
    __tablename__ = 'sometable2'

if __name__ == '__main__':
    engine = create_engine('sqlite://', echo=True)
    Base.metadata.bind = engine
    Session = scoped_session(sessionmaker(bind=engine))
    session = Session()

    # register and install
    MyStaffMapper = MyLibrary.register(MyStaff)
    MyStaffMapper2 = MyLibrary.register(MyStaff2)
    MyLibrary.setup()

    MyStaffMapper().mymethod()
    MyStaffMapper2().mymethod()

    session.query(MyStaffMapper.library_field) \
        .filter(MyStaffMapper.staff_field != None) \
        .all()
