In our system, we have two similar (but not identical) databases, so I built these SQLAlchemy models:
# base.py
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class T1(Base):
    __tablename__ = 't1'
    id = Column(Integer, primary_key=True)
    name = Column(String)
# production1.py
from sqlalchemy import Column, String

from . import base

class T1(base.T1):
    status1 = Column(String)
# production2.py
from sqlalchemy import Column, String

from . import base

class T1(base.T1):
    status2 = Column(String)
# sessions.py
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine1 = create_engine(**production1_params)
session1 = scoped_session(sessionmaker(bind=engine1))
engine2 = create_engine(**production2_params)
session2 = scoped_session(sessionmaker(bind=engine2))
Then I can access the different databases with:
import production1, production2

session1().query(production1.T1)
session2().query(production2.T1)
Now, I want to build our API with GraphQL. First, I inherited from SQLAlchemyConnectionField to support database switching:
from graphene import String
from graphene_sqlalchemy import SQLAlchemyConnectionField

class SwitchableConnectionField(SQLAlchemyConnectionField):
    def __init__(self, type, *args, **kwargs):
        kwargs.setdefault('db_type', String())
        super().__init__(type, *args, **kwargs)

    @classmethod
    def get_query(cls, model, info, sort=None, **args):
        session = get_session(args['db_type'])  # look up the scoped session for the requested database (sketch below)
        query = session.query(model)
        ...
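Here get_session() is a small helper of mine (the name and the key values are just illustrative) that maps the db_type argument to one of the scoped sessions from sessions.py, roughly like:
# illustrative helper: map a db_type value to one of the scoped sessions in sessions.py
from . import sessions

_SESSIONS = {
    'production1': sessions.session1,
    'production2': sessions.session2,
}

def get_session(db_type):
    # a scoped_session is callable and returns the current Session
    return _SESSIONS[db_type]()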
But when I went to define my nodes, I found the definitions had to be:
from graphene.relay import Node
from graphene_sqlalchemy import SQLAlchemyObjectType

import production1, production2

class Production1Node(SQLAlchemyObjectType):
    class Meta:
        model = production1.T1
        interfaces = (Node,)

class Production2Node(SQLAlchemyObjectType):
    class Meta:
        model = production2.T1
        interfaces = (Node,)
That means two node definitions to support the two databases. But I want to do something like this instead:
import base

class ProductionNode(SQLAlchemyObjectType):
    class Meta:
        model = base.T1
        interfaces = (Node,)
That way I could switch between the similar models at run time. However, even though I tried inheriting from Node, I couldn't get it to work. Does anyone know what I should do?
Related
I'm using joined table inheritance here.
class BaseEntity(Base):
    some_col = Column(String)
    base_relationship = relationship("some_relationship", backref="depends_on_who_inherits_me")

class SubEntity(BaseEntity):
    some_unique_col = Column(String)
Because the specific backref name only becomes known at run time (in this case it should be SubEntity, but it should be inheritable by any number of subclasses), I need the depends_on_who_inherits_me part to be a variable, or more specifically the inheriting subclass's name, instead of a fixed string. Each subclass should then have a relationship to a third-party class, with that class referring back to the particular subclass under its appropriate name.
However, because this is outside of any method, I can't use self to flexibly refer to the instances.
How can I implement this idea? Thanks.
One way you might achieve this is with a Mixin that uses declared_attr.cascading.
Here's the mixin class:
class Mixin:
    @declared_attr.cascading
    def related_entity(cls):
        if has_inherited_table(cls):
            return relationship(
                'RelatedEntity',
                backref=cls.__name__.lower(),
                uselist=False
            )
The cascading flag on declared_attr will make SQLAlchemy attempt to render the 'mixed in' attribute on every class in the hierarchy. Or, as the docs put it:
This is a special-use modifier which indicates that a column or
MapperProperty-based declared attribute should be configured
distinctly per mapped subclass, within a mapped-inheritance scenario.
The has_inherited_table() function allows us to determine within the mixin whether we are dealing with BaseEntity or a subclass, so that we only add the relationships to the subclasses.
The mixin is then inherited into the BaseEntity model:
class BaseEntity(Base, Mixin):
    id = sa.Column(sa.Integer, primary_key=True)
    related_id = sa.Column(
        sa.Integer, sa.ForeignKey('relatedentity.id'))
    discriminator = sa.Column(sa.String)

    @declared_attr
    def __mapper_args__(cls):
        if has_inherited_table(cls):
            args = {'polymorphic_identity': cls.__name__.lower()}
        else:
            args = {'polymorphic_on': cls.discriminator}
        return args
As you mentioned in your question that you are using joined table inheritance, I've defined __mapper_args__ on BaseEntity using a @declared_attr method, so that the polymorphic_identity can also be generated automatically from the class name for the subclasses.
So with this configuration, every subclass of BaseEntity will add a relationship attribute to RelatedEntity named after the subclass. Here's the full working example:
import sqlalchemy as sa
from sqlalchemy.ext.declarative import (declarative_base, declared_attr,
                                        has_inherited_table)
from sqlalchemy.orm import relationship, sessionmaker


class BaseClass:
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()


Base = declarative_base(cls=BaseClass)
engine = sa.create_engine('sqlite://', echo=False)
Session = sessionmaker(bind=engine)


class Mixin:
    @declared_attr.cascading
    def related_entity(cls):
        if has_inherited_table(cls):
            return relationship(
                'RelatedEntity',
                backref=cls.__name__.lower(),
                uselist=False
            )


class BaseEntity(Base, Mixin):
    id = sa.Column(sa.Integer, primary_key=True)
    related_id = sa.Column(
        sa.Integer, sa.ForeignKey('relatedentity.id'))
    discriminator = sa.Column(sa.String)

    @declared_attr
    def __mapper_args__(cls):
        if has_inherited_table(cls):
            args = {'polymorphic_identity': cls.__name__.lower()}
        else:
            args = {'polymorphic_on': cls.discriminator}
        return args


class RelatedEntity(Base):
    """ Class that is related to all `BaseEntity` subclasses"""
    id = sa.Column(sa.Integer, primary_key=True)


class SubEntity(BaseEntity):
    """ Will generate `RelatedEntity.subentity`"""
    id = sa.Column(sa.Integer, sa.ForeignKey('baseentity.id'),
                   primary_key=True)


class OtherEntity(BaseEntity):
    """ Will generate `RelatedEntity.otherentity`"""
    id = sa.Column(sa.Integer, sa.ForeignKey('baseentity.id'),
                   primary_key=True)
if __name__ == '__main__':
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    s = Session()

    rel_inst = RelatedEntity()
    s.add(rel_inst)
    rel_inst.subentity.append(SubEntity())
    rel_inst.otherentity.append(OtherEntity())
    s.commit()

    print(rel_inst.subentity, rel_inst.otherentity)
    # [<__main__.SubEntity object at 0x0000023487D42C18>] [<__main__.OtherEntity object at 0x0000023487D60278>]
The reason we can't define the related_entity() declared_attr method directly on BaseEntity is that SQLAlchemy will not honor the cascade there, so no relationships would be generated (the if has_inherited_table(cls): block prevents BaseEntity itself from generating one). From the docs:
The flag only applies to the use of declared_attr on declarative mixin
classes and __abstract__ classes; it currently has no effect when used
on a mapped class directly.
In my REST application I want to return JSON in the JSON API format, but to do that I need to create a Schema class and define every field again, even though they already exist in my model. Instead of declaring every field in the schema class, can I take them from the DB model?
Below is my model class:
class Author(db.Model):
    id = db.Column(db.Integer)
    name = db.Column(db.String(255))
I am defining the schema like below:
class AuthorSchema(Schema):
    id = fields.Str(dump_only=True)
    name = fields.Str()
    metadata = fields.Meta()

    class Meta:
        type_ = 'people'
        strict = True
So here I have defined id and name twice. Is there any option in marshmallow-jsonapi to assign the model to the schema class so that it can take all fields from the model?
Note: I am using marshmallow-jsonapi for this. I have tried marshmallow-sqlalchemy, which has that option, but it does not return JSON in the JSON API format.
You can use flask-marshmallow's ModelSchema and marshmallow-sqlalchemy in combination with marshmallow-jsonapi with the caveat that you have to subclass not only the Schema classes but also the SchemaOpts classes, like this:
# ...
from flask_marshmallow import Marshmallow
from marshmallow_jsonapi import Schema, SchemaOpts
from marshmallow_sqlalchemy import ModelSchemaOpts
# ...

ma = Marshmallow(app)

# ...

class JSONAPIModelSchemaOpts(ModelSchemaOpts, SchemaOpts):
    pass

class AuthorSchema(ma.ModelSchema, Schema):
    OPTIONS_CLASS = JSONAPIModelSchemaOpts

    class Meta:
        type_ = 'people'
        strict = True
        model = Author

# ...
foo = AuthorSchema()
bar = foo.dump(query_results).data  # This will be in JSONAPI format, including every field in the model
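For illustration (my own sketch, not part of the original answer: the route, app, and query are assumed), dumping a set of authors from a Flask view then produces a JSON API document with type, id, and attributes keys:
# illustrative only: assumes app, Author and AuthorSchema as defined above
from flask import jsonify

@app.route('/authors')
def list_authors():
    authors = Author.query.all()
    payload = AuthorSchema(many=True).dump(authors).data
    # payload is shaped roughly like:
    # {"data": [{"type": "people", "id": "1", "attributes": {"name": "..."}}]}
    return jsonify(payload)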
Is there a possibility to make the __tablename__ in flask-sqlalchemy models dynamic with the declarative base approach?
Usually you set it like this:
class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50), unique=True)
    email = Column(String(120), unique=True)

    def __init__(self, name=None, email=None):
        self.name = name
        self.email = email

    def __repr__(self):
        return '<User %r>' % (self.name)
I would like to change it through a parameter (maybe in the constructor?), so that I can have a table per user.
I found some other approaches in this guide here (Approaches), but I would like to use the session for that, as I am already using it for the other models.
You can utilize Python's type() function to dynamically build SQLAlchemy models. Here's an example:
from sqlalchemy import Column
from sqlalchemy.dialects.mysql import BIGINT, INTEGER, VARCHAR

# define columns in an abstract model class
class Log(Base):
    __abstract__ = True  # this line is necessary
    # the columns id, content and user_id are just examples
    id = Column(BIGINT(64), primary_key=True)
    content = Column(VARCHAR(200), nullable=False)
    user_id = Column(INTEGER(unsigned=True))

# build a model class with a specific table name
def get_log_model(year):
    tablename = 'logs_%s' % year  # dynamic table name
    Model = type('Model', (Log,), {
        '__tablename__': tablename
    })
    return Model

# Log2022 corresponds to table "logs_2022"
Log2022 = get_log_model(2022)

# use the dynamically built model in the same way as regular models
print(session.query(Log2022).count())  # row count of table "logs_2022"
I also wrote an article about it on my website, it may help you too: https://easydevguide.com/posts/dynamic_table
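One caveat worth noting (my addition to the answer above): calling type() a second time for the same table name tries to map a new class onto a table that already exists in the metadata and raises an error, so in practice it helps to cache the generated classes. A small sketch building on the Log base above; the cache dict and function name are my own:
# cache dynamically built models so each table name is only mapped once
_log_models = {}

def get_log_model_cached(year):
    tablename = 'logs_%s' % year
    if tablename not in _log_models:
        _log_models[tablename] = type(
            'Log%s' % year,              # class name, used for repr/debugging
            (Log,),
            {'__tablename__': tablename},
        )
    return _log_models[tablename]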
I use SQLAlchemy as my ORM and am trying to port my test fixtures to factory_boy. My schema includes two objects in a one-to-many relation, i.e. instances of one model hold list-like structures containing instances of the other. Example:
class Person(...):
    id = Column(Integer, primary_key=True)
    name = Column(Text)
    [...]

class Address(...):
    id = Column(Integer, primary_key=True)
    city = Column(Text)
    [...]
    person_id = Column(Integer, ForeignKey('person.id'))
    person = relationship("Person", backref="addresses")
Now I am trying to create a factory which creates persons with a couple of addresses. factory_boy has the SubFactory, but I only see how to use that in a one-to-one relationship. I know I can create the addresses with a separate factory and then attach them, but I would like to do something like person = PersonFactory.create(num_addresses=4).
Does anyone know if this is currently possible in factory_boy?
I use factory_boy 2.4.1.
I am using this pattern in my project; it assumes you already have an AddressFactory.
https://factoryboy.readthedocs.io/en/latest/reference.html?highlight=post_generation#factory.post_generation
class PersonFactory(factory.alchemy.SQLAlchemyModelFactory):
    class Meta:
        model = Person

    @factory.post_generation
    def addresses(obj, create, extracted, **kwargs):
        if not create:
            return
        if extracted:
            assert isinstance(extracted, int)
            AddressFactory.create_batch(size=extracted, person_id=obj.id, **kwargs)
Usage:
PersonFactory(addresses=4)
This will create a Person with 4 Addresses.
It also accepts kwargs:
PersonFactory(addresses=2, addresses__city='London')
This will create a Person with 2 Addresses whose city field is set to 'London'.
Here is a blog post which may help: https://simpleit.rocks/python/django/setting-up-a-factory-for-one-to-many-relationships-in-factoryboy/
@Kristen pointed in the right direction, but the AddressFactory wasn't related to the Person.
In Django we can use the post_generation decorator like this:
class PersonFactory(BaseFactory):
    @factory.post_generation
    def addresses(self, create, extracted, **kwargs):
        self.addresses_set.add(AddressFactory(person=self))
I had this exact question and was disappointed in the lack of good answers here. Turns out it is possible! Leaving this here for those who have the same question.
First, your model needs to define the relationship on the opposite model from the ForeignKey, so it should look like:
class Person(...):
    id = Column(Integer, primary_key=True)
    name = Column(Text)
    addresses = relationship("Address", backref="person")
    [...]

class Address(...):
    id = Column(Integer, primary_key=True)
    city = Column(Text)
    [...]
    person_id = Column(Integer, ForeignKey('person.id'))
Then, on your PersonFactory, you can add a post_generation hook like this:
class PersonFactory(BaseFactory):
    [...attributes...]

    @factory.post_generation
    def addresses(self, create, extracted, **kwargs):
        return AddressFactory.create_batch(4, person=self)  # link each address back to this person
and replace the '4' with whatever number you want. Obviously, you need to define the AddressFactory as well.
Currently, there is no way to implement a "many-to-one RelatedFactory" such that it is "baked into your factory"...
That said, this behavior can be implemented with a bit of hackery when instantiating your PersonFactory.
The following recipe will get you what you are looking for:
from sqlalchemy import create_engine, Integer, Text, ForeignKey, Column
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, scoped_session, sessionmaker
import factory
from factory.alchemy import SQLAlchemyModelFactory as sqla_factory
import random

engine = create_engine("sqlite:////tmp/factory_boy.sql")
session = scoped_session(sessionmaker(bind=engine))
Base = declarative_base()


class Person(Base):
    __tablename__ = 'person'
    id = Column(Integer, primary_key=True)
    name = Column(Text)
    addresses = relationship("Address", backref="person")


class Address(Base):
    __tablename__ = 'address'
    id = Column(Integer, primary_key=True)
    street = Column(Text)
    street_number = Column(Integer)
    person_id = Column(Integer, ForeignKey('person.id'))


class AddressFactory(sqla_factory):
    class Meta:
        model = Address
        sqlalchemy_session = session

    # LazyFunction so each address gets its own random street number
    street_number = factory.LazyFunction(lambda: random.randint(0, 10000))
    street = "Commonwealth Ave"


class PersonFactory(sqla_factory):
    class Meta:
        model = Person
        sqlalchemy_session = session

    name = "John Doe"


Base.metadata.create_all(engine)

for i in range(100):
    person = PersonFactory(addresses=AddressFactory.create_batch(3))
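As a quick sanity check (my addition; it assumes a fresh database file), you can flush the session afterwards and count what was generated:
# verify the recipe: 100 persons, each with 3 addresses (assumes an empty database)
session.commit()
print(session.query(Person).count())    # 100
print(session.query(Address).count())   # 300
print(session.query(Person).first().addresses)  # three Address instances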
You could use the solution described here: http://factoryboy.readthedocs.org/en/latest/recipes.html#reverse-dependencies-reverse-foreignkey
Basically, just declare a few RelatedFactory attributes on your PersonFactory:
class PersonFactory(factory.alchemy.SQLAlchemyModelFactory):
    class Meta:
        model = Person

    address_1 = factory.RelatedFactory(AddressFactory, 'person')
    address_2 = factory.RelatedFactory(AddressFactory, 'person')
So say, for example, I have two classes:
Base = declarative_base()

class Vendor(Base):
    __tablename__ = "vendor"
    id = Column(Integer, primary_key=True)
    name = Column(String(45))

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "<Vendor id=%d>" % self.id

class Site(Base):
    __tablename__ = "site"
    id = Column(Integer, primary_key=True)
    vendor_id = Column(Integer, ForeignKey('vendor.id'))
    vendor = relationship(Vendor)

    def __init__(self, name, code, location):
        self.name = name

    def __repr__(self):
        return "<Site id=%d>" % self.id

class SQLSchema:
    def __init__(self):
        self.engine = create_engine('...', echo=False)
        self.metadata = Base.metadata
        self.session = sessionmaker(bind=self.engine)()
        self.create()

    def create(self):
        self.metadata.create_all(self.engine)
I've simplified the class structure. Now, using these classes, I can write:
sql = sqlschema.SQLSchema()
So, at times, I want easy access to a Vendor, which means I can use:
v = sql.session.query(Vendor).filter_by(name='test').one()
I'd prefer to simplify the access to something akin to this (currently my best effort):
Vendor.get(sql.session, name='A-01')
It struck me that the get function is pretty generic and something I'd want across all my classes that inherit from Base, so I looked into the best way of doing this. There are two ways I can think of:
a Mix-in class
Modifying the metaclass supplied to declarative_base
Example of the metaclass modification:
class MyDeclarativeMeta(DeclarativeMeta):
    def get(self, session, **filterargs):
        return session.query(self).filter_by(**filterargs).one()

## ...

Base = declarative_base(metaclass=MyDeclarativeMeta)
I'd like to create something with the least surprise possible. What are people's opinions on the options I've presented and is there a better way altogether?
Custom metaclasses aren't needed with Declarative for simple use cases, and pretty much not at all for hard use cases either. Mixins + custom bases should be able to do pretty much everything.
Declare it on the base:
class Base(object):
    def get(...):
        # ...

Base = declarative_base(cls=Base)
or use a mixin.
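For completeness, here's a minimal sketch of the custom-base approach with a classmethod get(); the class and variable names are illustrative, not from the original code:
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

class BaseHelpers(object):
    @classmethod
    def get(cls, session, **filterargs):
        # return the single row matching the given keyword filters
        return session.query(cls).filter_by(**filterargs).one()

Base = declarative_base(cls=BaseHelpers)

class Vendor(Base):
    __tablename__ = 'vendor'
    id = Column(Integer, primary_key=True)
    name = Column(String(45))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Vendor(name='A-01'))
session.commit()

vendor = Vendor.get(session, name='A-01')  # available on every model inheriting Base
The same get() could equally live on a mixin that each model inherits alongside Base; the custom-base route just avoids repeating the mixin in every class definition.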