I checked for these 4 things and don't see any issues:
1. Same data type
2. Same nullable designation
3. Foreign Key being assigned to PK or Unique column
4. Same Charset for both
Foreign Key: table_entity.db_id FOR Unique Column: db_entity.db_id
db_entity_schema.py (Parent)
from sqlalchemy import Column, String, SMALLINT
from sqlalchemy.dialects.mysql import SMALLINT, TINYINT, BIGINT, CHAR
# Fix: `class DbEntity(Base=declartive_base)` is not valid — a declarative model
# must inherit from the Base *instance* returned by declarative_base()
# (and note the typo: `declartive_base`).
class DbEntity(Base):
    """Parent table 'db_entity'.

    `db_id` carries a UNIQUE constraint, which makes it a legal target for
    the foreign key declared on TableEntity.db_id.
    """
    __tablename__ = "db_entity"
    seq = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    db_id = Column(String(24), unique=True, nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
table_entity_schema.py (Child)
from sqlalchemy import Column, String, ForeignKey
from sqlalchemy.dialects.mysql import SMALLINT, TINYINT, BIGINT
from sqlalchemy import UniqueConstraint
# Fix: inherit from the declarative Base instance; `Base=declartive_base` is
# invalid class-definition syntax (and `declartive_base` is a typo).
class TableEntity(Base):
    """Child table 'table_entity'; `db_id` references db_entity.db_id."""
    __tablename__ = "table_entity"
    __table_args__ = (UniqueConstraint("schema_name", "table_name", name="schema_table_uq_constraint"),)
    table_no = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    db_id = Column(String(24), ForeignKey("db_entity.db_id"), nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
    schema_name = Column(String(128), nullable=False)
    table_name = Column(String(128), nullable=False)
When migrating, this results in errno: 150 "Foreign key constraint is incorrectly formed", which is quite puzzling.
Have you tried declaring the related object like this?
from sqlalchemy import Column, String, ForeignKey
from sqlalchemy.dialects.mysql import SMALLINT, TINYINT, BIGINT
from sqlalchemy import UniqueConstraint
# Fix: inherit from the declarative Base instance; `Base=declartive_base` is
# invalid class-definition syntax (and `declartive_base` is a typo).
class TableEntity(Base):
    """Child table variant: FK declared via the mapped column object
    (ForeignKey(DbEntity.db_id)) instead of a string target."""
    __tablename__ = "table_entity"
    __table_args__ = (UniqueConstraint("schema_name", "table_name", name="schema_table_uq_constraint"),)
    table_no = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    db_id = Column(String(24), ForeignKey(DbEntity.db_id), nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
    schema_name = Column(String(128), nullable=False)
    table_name = Column(String(128), nullable=False)
With that said, it seems like you have a perfectly good primary key on the DbEntity.seq, why not use that for your foreign key?
This probably isn't the complete answer but I'm posting this to display the migration I autogenerated to see if it helps narrow down the issue.
Using SQLAlchemy 1.4.45, alembic 1.9.0, pymysql 1.0.2 and mysql 8.0.31-1.el8 (docker image).
I don't know much about mysql drivers so I have no preference.
771ce744af5d_.py
"""empty message
Revision ID: 771ce744af5d
Revises:
Create Date: 2022-12-22 02:11:08.325864
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '771ce744af5d'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create 'db_entity' (parent) first, then 'table_entity' (child) with
    its FK to db_entity.db_id — both in the 'testdb' schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('db_entity',
        sa.Column('seq', mysql.BIGINT(display_width=20), autoincrement=True, nullable=False),
        sa.Column('db_id', sa.String(length=24), nullable=False, comment='DB ID'),
        sa.Column('db_service_id', sa.String(length=24), nullable=False),
        sa.PrimaryKeyConstraint('seq'),
        # UNIQUE index on db_id is what makes it a legal FK target below.
        sa.UniqueConstraint('db_id'),
        schema='testdb'
    )
    op.create_table('table_entity',
        sa.Column('table_no', mysql.BIGINT(display_width=20), autoincrement=True, nullable=False),
        sa.Column('db_id', sa.String(length=24), nullable=False, comment='DB ID'),
        sa.Column('db_service_id', sa.String(length=24), nullable=False),
        sa.Column('schema_name', sa.String(length=128), nullable=False),
        sa.Column('table_name', sa.String(length=128), nullable=False),
        # NOTE(review): MySQL errno 150 on this constraint usually indicates a
        # type/charset/collation mismatch between the two db_id columns —
        # verify both tables end up with the same charset/collation.
        sa.ForeignKeyConstraint(['db_id'], ['testdb.db_entity.db_id'], ),
        sa.PrimaryKeyConstraint('table_no'),
        sa.UniqueConstraint('schema_name', 'table_name', name='schema_table_uq_constraint'),
        schema='testdb'
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the child table before the parent so the FK does not block it."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('table_entity', schema='testdb')
    op.drop_table('db_entity', schema='testdb')
    # ### end Alembic commands ###
Alembic's env.py
I just slopped this together to autogenerate a revision, so please excuse its formatting and messy imports.
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
from sqlalchemy import (
Integer,
String,
ForeignKey,
UniqueConstraint,
)
from sqlalchemy.schema import (
Column,
)
from sqlalchemy.orm import backref, relationship, declarative_base, Session
from sqlalchemy import create_engine, MetaData, Column, ForeignKey, Integer, String
# All models below share one MetaData pinned to the "testdb" schema.
Base = declarative_base(metadata=MetaData(schema="testdb"))
from sqlalchemy import Column, String, SMALLINT
from sqlalchemy.dialects.mysql import SMALLINT, TINYINT, BIGINT, CHAR
class DbEntity(Base):
    """Parent model, duplicated in env.py so autogenerate can see it."""
    __tablename__ = "db_entity"
    seq = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    # unique=True is what allows TableEntity.db_id to reference this column
    db_id = Column(String(24), unique=True, nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
class TableEntity(Base):
    """Child model, duplicated in env.py so autogenerate can see it."""
    __tablename__ = "table_entity"
    __table_args__ = (UniqueConstraint("schema_name", "table_name", name="schema_table_uq_constraint"),)
    table_no = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    # String FK target resolves within Base's metadata ("testdb" schema)
    db_id = Column(String(24), ForeignKey("db_entity.db_id"), nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
    schema_name = Column(String(128), nullable=False)
    table_name = Column(String(128), nullable=False)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just a URL instead of an Engine,
    so no DBAPI needs to be available; context.execute() emits the given
    statements to the script output.
    """
    options = {
        "url": config.get_main_option("sqlalchemy.url"),
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**options)
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the ini-file section and binds a live
    connection to the Alembic context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as conn:
        context.configure(connection=conn, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic executes env.py and we pick the mode it was run in.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
I'm using SQLAlchemy 1.3.20 and Python 3.8.
In the code below I used the sessionmaker class, then created a session object.
When I call its methods, such as add(), commit(), query(), etc.,
I see this error: TypeError: 'Session' object is not callable.
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, ForeignKey
# Fix 1: the factory is `sessionmaker` (lowercase), not `Sessionmaker`.
from sqlalchemy.orm import sessionmaker, relationship

base = declarative_base()
engine = create_engine('sqlite:///c:\\users\\alierza\\desktop\\python\\instagram\\test.db',echo=True)

class Cars(base):
    __tablename__ = 'cars'
    id = Column(Integer, primary_key=True)
    make = Column(String(50), nullable=False)
    color = Column(String(50), nullable=False)

class carOweners(base):
    __tablename__ = 'carowner'
    id = Column(Integer, primary_key=True)
    name = Column(String(50), nullable=False)
    age = Column(Integer, nullable=False)
    carid = Column(Integer, ForeignKey('cars.id'))
    car = relationship('Cars')

Session = sessionmaker(bind=engine)
session = Session()
base.metadata.create_all(engine)

car1 = Cars(make='Ford', color='siliver')
# Fix 2: `session` is already a Session instance — call add()/commit() on it
# directly; `session()` raises "TypeError: 'Session' object is not callable".
session.add(car1)
# Fix 3: commit (or flush) first so car1.id is populated by the database
# before it is used as the child row's foreign key (it was None before).
session.commit()
owenr1 = carOweners(name='Joe', age=20, carid=car1.id)
session.add(owenr1)
session.commit()
It seems that those methods do not exist.
Any tips or help?
As the title says, I want to set up the counterpart of a PostgreSQL SERIAL column in my SQLAlchemy model. Can anyone help me?
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class Review(db.Model):
    """One user review of one book."""
    __tablename__ = "reviews"
    # SERIAL counterpart: an Integer primary-key column defaults to
    # autoincrement=True, which SQLAlchemy renders as SERIAL on PostgreSQL —
    # no extra argument is needed (the `(???)` placeholder was invalid syntax).
    id = db.Column(db.INTEGER, primary_key=True)
    name = db.Column(db.VARCHAR, db.ForeignKey("users.username"))
    bookisbn = db.Column(db.CHAR(10), db.ForeignKey("books.isbn"))
    review = db.Column(db.VARCHAR, nullable=False)
    rating = db.Column(db.INTEGER, nullable=False)
I'm trying to figure out why I need to use a no_autoflush block when inserting data into an association proxy if the association proxy data has been accessed first. An example of this is below (using MySQL):
from sqlalchemy import create_engine, Integer, Column, String, ForeignKey, UniqueConstraint
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship, scoped_session
Base = declarative_base()
# Fix: a SQLAlchemy URL is dialect://user:password@host/dbname — the '#'
# between password and host was a typo for '@'.
engine = create_engine('{}://{}:{}@{}/{}'.format(...))
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
class DomainModel(Base):
    """Domain side of the many-to-many; `servers` proxies through
    DomainServerModel association rows."""
    __tablename__ = 'domains'
    id = Column(Integer, primary_key=True)
    name = Column(String(255), nullable=False, unique=True)
    domains_to_servers = relationship("DomainServerModel", back_populates="domain")
    # Appending a ServerModel here builds DomainServerModel(server=s); the
    # domain side is filled in by the relationship, not by the creator.
    servers = association_proxy('domains_to_servers', 'server',
                                creator=lambda s: DomainServerModel(server=s))
class ServerModel(Base):
    """Server side of the many-to-many; `domains` proxies through
    DomainServerModel association rows."""
    __tablename__ = 'servers'
    id = Column(Integer, primary_key=True)
    name = Column(String(128), nullable=False, unique=True, index=True)
    domains_to_servers = relationship("DomainServerModel", back_populates="server")
    # Appending a DomainModel here builds DomainServerModel(domain=d); the
    # server side is filled in by the relationship, not by the creator.
    domains = association_proxy('domains_to_servers', 'domain',
                                creator=lambda d: DomainServerModel(domain=d))
class DomainServerModel(Base):
    """Association row linking one domain to one server; both FKs NOT NULL,
    which is why a premature flush of a half-built row raises IntegrityError."""
    __tablename__ = 'domains_to_servers'
    id = Column(Integer, primary_key=True)
    domain_id = Column(Integer, ForeignKey('domains.id'), nullable=False)
    server_id = Column(Integer, ForeignKey('servers.id'), nullable=False)
    server = relationship('ServerModel', back_populates="domains_to_servers")
    domain = relationship('DomainModel', back_populates="domains_to_servers")
def test():
    """Repro: append to the association proxy after it has been read."""
    session = Session()
    # NOTE(review): without this no_autoflush guard the asker reports an
    # IntegrityError (server_id cannot be null) — presumably an autoflush
    # fires while the new association row is only partially populated.
    # TODO confirm against SQLAlchemy's autoflush/association_proxy docs.
    with session.no_autoflush:
        s = session.query(ServerModel).filter_by(name='test.com').one()
        print(s.domains)  # reading the proxy loads domains_to_servers
        d = DomainModel(name='test1.com')
        session.add(d)
        session.commit()
        s.domains.append(d)  # creator builds DomainServerModel(domain=d)
        session.commit()
if __name__ == '__main__':
    # Reset the schema, seed a single server row, then run the repro.
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    session = Session()
    session.add(ServerModel(name='test.com'))
    session.commit()
    test()
I'm trying to add a new domain_to_server mapping via the server/domain association proxy. If I don't access the association proxy first (i.e. remove the print statement in test()), then I can add the domain without needing the session.no_autoflush block. But with the print statement in there, it fails without the session.no_autoflush block with an IntegrityError, saying that server_id cannot be null in the domains_to_servers table.
I'm trying to figure out why the no_autoflush block is needed here. I don't see any mention of it in the association_proxy docs. Is this simply the way it is, and should all inserts into an association_proxy happen in a no_autoflush block in case the proxy has been accessed prior to the insert?
I have an Alchemy ORM object:
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class MyORM(Base):
    # Fix: a declarative class requires __tablename__ (or __table__); without
    # one SQLAlchemy raises InvalidRequestError at class-definition time.
    # (Column/Integer/String also need to be imported from sqlalchemy.)
    __tablename__ = 'myorm'
    id = Column(Integer, primary_key=True)
    name = Column(String(128), unique=True, nullable=False)
When using alembic to create the table I do the following:
def upgrade():
    """Create the 'myorm' table to match the MyORM model."""
    op.create_table(
        'myorm',
        sa.Column('id', sa.Integer, primary_key=True),
        # Fix: the model declares name with unique=True; mirror it here so
        # the migrated table matches the ORM definition.
        sa.Column('name', sa.String(128), nullable=False, unique=True),
    )
Question: Is there a way to use the MyORM class to create the table? Something like this:
def upgrade():
    # NOTE(review): hypothetical API from the question — sa.BaseObject does
    # not exist in SQLAlchemy; this only illustrates the desired usage.
    op.create_table(
        'myorm',
        sa.BaseObject(MyORM)
    )
This is exactly what Alembic migrations are trying to avoid. If you tie your migration to the current state of your model, it will not be a consistent upgrade path.
You can use declarative in your migrations to create tables and migrate data, but not to alter. You will have to re-create the definitions separate from the application definitions. This can be useful if you want to do a data migration and are more familiar with ORM queries instead of core queries.
Here is an example migration that creates Foo and Bar models with a many-to-many relationship using declarative, creates the tables, and inserts some data.
"""declarative
Revision ID: 169ad57156f0
Revises: 29b4c2bfce6d
Create Date: 2014-06-25 09:00:06.784170
"""
revision = '169ad57156f0'
down_revision = '29b4c2bfce6d'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
# Session factory and Base defined locally in the migration, deliberately
# independent of the application's own model definitions.
Session = sessionmaker()
Base = declarative_base()
class Foo(Base):
    """Migration-local model for the 'foo' table."""
    __tablename__ = 'foo'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String, nullable=False, unique=True)
class Bar(Base):
    """Migration-local model for the 'bar' table; backref adds Foo.bars."""
    __tablename__ = 'bar'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String, nullable=False, unique=True)
    # lambda defers evaluation of foo_bar, which is defined below this class
    foos = relationship(Foo, lambda: foo_bar, backref='bars')
# Many-to-many association table; composite primary key of the two FKs.
foo_bar = sa.Table(
    'foo_bar', Base.metadata,
    sa.Column('foo_id', sa.Integer, sa.ForeignKey('foo.id'), primary_key=True),
    sa.Column('bar_id', sa.Integer, sa.ForeignKey('bar.id'), primary_key=True)
)
def upgrade():
    """Create the declarative tables and seed them with linked sample rows."""
    bind = op.get_bind()
    Base.metadata.create_all(bind=bind)

    session = Session(bind=bind)
    # Workaround for a Flask-SQLAlchemy bug when its Session subclass is used.
    session._model_changes = False

    foo_a, foo_b = Foo(name='f1'), Foo(name='f2')
    bar_a, bar_b = Bar(name='b1'), Bar(name='b2')
    foo_a.bars = [bar_a, bar_b]
    bar_b.foos.append(foo_b)

    session.add_all([foo_a, foo_b, bar_a, bar_b])
    session.commit()
def downgrade():
    """Reverse the seed data via ORM deletes (table drops shown commented out)."""
    bind = op.get_bind()
    # in this case all we need to do is drop the tables
    # Base.metadata.drop_all(bind=bind)
    # but we could also delete data
    session = Session(bind=bind)
    session._model_changes = False  # if you are using Flask-SQLAlchemy, this works around a bug
    b1 = session.query(Bar).filter_by(name='b1').one()
    session.delete(b1)
    session.commit()