Checked for these 4 things, and I don't seem to see issues:
1. Same data type
2. Same nullable designation
3. Foreign Key being assigned to PK or Unique column
4. Same Charset for both
Foreign Key: table_entity.db_id FOR Unique Column: db_entity.db_id
db_entity_schema.py (Parent)
from sqlalchemy import Column, String, SMALLINT
from sqlalchemy.dialects.mysql import SMALLINT, TINYINT, BIGINT, CHAR
class DbEntity(Base=declartive_base):  # NOTE(review): invalid — `declartive_base` is a typo for `declarative_base`, and a class keyword argument here raises TypeError; intended `class DbEntity(Base):` with `Base = declarative_base()`
    """Parent table: one row per database; db_id is the FK target."""
    __tablename__ = "db_entity"
    # Surrogate auto-increment primary key (MySQL BIGINT(20)).
    seq = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    # UNIQUE so it qualifies as a foreign-key target for table_entity.db_id.
    db_id = Column(String(24), unique=True, nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
table_entity_schema.py (Child)
from sqlalchemy import Column, String, ForeignKey
from sqlalchemy.dialects.mysql import SMALLINT, TINYINT, BIGINT
from sqlalchemy import UniqueConstraint
class TableEntity(Base=declartive_base):  # NOTE(review): same invalid base-class syntax as DbEntity above
    """Child table; db_id references db_entity.db_id (String(24) on both sides)."""
    __tablename__ = "table_entity"
    __table_args__ = (UniqueConstraint("schema_name", "table_name", name="schema_table_uq_constraint"),)
    table_no = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    # FK declared via string target "db_entity.db_id" — the env.py below binds
    # a schema-qualified MetaData (schema="testdb"), so name resolution happens there.
    db_id = Column(String(24), ForeignKey("db_entity.db_id"), nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
    schema_name = Column(String(128), nullable=False)
    table_name = Column(String(128), nullable=False)
When migrating, this results in an errno: 150 "Foreign key constraint is incorrectly formed" error, which is quite puzzling.
Have you tried to declare the related object like
from sqlalchemy import Column, String, ForeignKey
from sqlalchemy.dialects.mysql import SMALLINT, TINYINT, BIGINT
from sqlalchemy import UniqueConstraint
class TableEntity(Base=declartive_base):  # NOTE(review): base-class syntax copied from the question; still invalid as written
    """Suggested variant: reference the mapped column object instead of a string."""
    __tablename__ = "table_entity"
    __table_args__ = (UniqueConstraint("schema_name", "table_name", name="schema_table_uq_constraint"),)
    table_no = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    # Difference from the question's code: ForeignKey(DbEntity.db_id) passes the
    # Column object directly, sidestepping string-based table-name resolution.
    db_id = Column(String(24), ForeignKey(DbEntity.db_id), nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
    schema_name = Column(String(128), nullable=False)
    table_name = Column(String(128), nullable=False)
With that said, it seems like you have a perfectly good primary key on the DbEntity.seq, why not use that for your foreign key?
This probably isn't the complete answer but I'm posting this to display the migration I autogenerated to see if it helps narrow down the issue.
Using SQLAlchemy 1.4.45, alembic 1.9.0, pymysql 1.0.2 and mysql 8.0.31-1.el8 (docker image).
I don't know much about mysql drivers so I have no preference.
771ce744af5d_.py
"""empty message
Revision ID: 771ce744af5d
Revises:
Create Date: 2022-12-22 02:11:08.325864
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '771ce744af5d'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create db_entity (parent) then table_entity (child + FK) in schema testdb."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('db_entity',
        sa.Column('seq', mysql.BIGINT(display_width=20), autoincrement=True, nullable=False),
        sa.Column('db_id', sa.String(length=24), nullable=False, comment='DB ID'),
        sa.Column('db_service_id', sa.String(length=24), nullable=False),
        sa.PrimaryKeyConstraint('seq'),
        # Unique index on db_id — required for it to serve as an FK target in MySQL.
        sa.UniqueConstraint('db_id'),
        schema='testdb'
    )
    op.create_table('table_entity',
        sa.Column('table_no', mysql.BIGINT(display_width=20), autoincrement=True, nullable=False),
        sa.Column('db_id', sa.String(length=24), nullable=False, comment='DB ID'),
        sa.Column('db_service_id', sa.String(length=24), nullable=False),
        sa.Column('schema_name', sa.String(length=128), nullable=False),
        sa.Column('table_name', sa.String(length=128), nullable=False),
        # Schema-qualified FK target; errno 150 typically means the referenced
        # column's type/charset/index doesn't match — compare table charsets too.
        sa.ForeignKeyConstraint(['db_id'], ['testdb.db_entity.db_id'], ),
        sa.PrimaryKeyConstraint('table_no'),
        sa.UniqueConstraint('schema_name', 'table_name', name='schema_table_uq_constraint'),
        schema='testdb'
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the child table first so the FK doesn't block dropping the parent."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('table_entity', schema='testdb')
    op.drop_table('db_entity', schema='testdb')
    # ### end Alembic commands ###
Alembic's env.py
I just threw this together to autogenerate a revision, so please excuse its formatting and messy imports.
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
from sqlalchemy import (
Integer,
String,
ForeignKey,
UniqueConstraint,
)
from sqlalchemy.schema import (
Column,
)
from sqlalchemy.orm import backref, relationship, declarative_base, Session
from sqlalchemy import create_engine, MetaData, Column, ForeignKey, Integer, String
Base = declarative_base(metadata=MetaData(schema="testdb"))
from sqlalchemy import Column, String, SMALLINT
from sqlalchemy.dialects.mysql import SMALLINT, TINYINT, BIGINT, CHAR
class DbEntity(Base):
    """Parent model as declared in env.py (Base carries MetaData(schema="testdb"))."""
    __tablename__ = "db_entity"
    seq = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    # Unique natural key; FK target for TableEntity.db_id.
    db_id = Column(String(24), unique=True, nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
class TableEntity(Base):
    """Child model as declared in env.py; FK uses the unqualified string target."""
    __tablename__ = "table_entity"
    __table_args__ = (UniqueConstraint("schema_name", "table_name", name="schema_table_uq_constraint"),)
    table_no = Column(BIGINT(20), primary_key=True, autoincrement=True, nullable=False)
    # String FK target resolves within Base.metadata, which is schema-qualified.
    db_id = Column(String(24), ForeignKey("db_entity.db_id"), nullable=False, comment="DB ID")
    db_service_id = Column(String(24), nullable=False)
    schema_name = Column(String(128), nullable=False)
    table_name = Column(String(128), nullable=False)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with only a database URL — no Engine is
    created, so no DBAPI needs to be installed. context.execute()
    calls emit SQL text to the script output instead of a connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] .ini section and binds a live
    connection to the migration context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Dispatch on Alembic's invocation mode (--sql generates offline scripts).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
Related
I'm trying to implement a basic resource access using SQL Alchemy 1.4 and a PostgreSQL database.
Python code
from sqlalchemy.ext.declarative import DeclarativeMeta, declarative_base
Base: DeclarativeMeta = declarative_base()
class User(Base):
    """Account row; looked up by UUID primary key in select_all below."""
    __tablename__ = 'user'
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    email = Column(String(length=255), index=True, nullable=False)
class Resource(Base):
    """Resource row; joined to users through UserResourceRole."""
    __tablename__ = "resource"
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(length=255), index=True, nullable=False)
class UserResourceRole(Base):
    """Association table (user, resource) with a per-pair can_edit flag.

    Composite primary key over both FKs; rows are removed when either
    side is deleted (ondelete="CASCADE").
    """
    __tablename__ = "user_resource_role"
    user_id = Column(
        UUID(as_uuid=True),
        ForeignKey("user.id", ondelete="CASCADE"),
        primary_key=True
    )
    resource_id = Column(
        UUID(as_uuid=True),
        ForeignKey("resource.id", ondelete="CASCADE"),
        primary_key=True,
    )
    can_edit = Column(Boolean, default=False, nullable=False)
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.ext.asyncio.engine import AsyncEngine
from sqlalchemy.orm import sessionmaker
from the_other_file import User, Resource, UserResourceRole
async def select_all(user: User, db_session: AsyncSession):
    """Fetch (Resource, UserResourceRole) pairs for the given user.

    NOTE(review): .scalars() keeps only the FIRST entity of each result
    row tuple (here Resource) — exactly the symptom described below.
    Use results.all() to keep both entities per row.
    """
    results = await db_session.execute(
        select(Resource, UserResourceRole)
        .join(
            UserResourceRole,
            Resource.id == UserResourceRole.resource_id
        )
        .where(UserResourceRole.user_id == user.id)
    )
    return results.scalars().all()
engine: AsyncEngine = create_async_engine(POSTGRES_URL, future=True)
async_session = sessionmaker(
bind=engine, class_=AsyncSession, expire_on_commit=False, future=True
)
# ignore the fact that it's not executed in asyncio loop
a_db_session = await async_session()
resources = await select_all(user=a_real_user_is_here, db_session=a_db_session)
print(resources)
I can't retrieve anything from UserResourceRole in my results. It only contains data from Resource. If i swap the objects in the select call, then I can only retrieve data from UserResourceRole.
What I'm expecting
I'm expecting to have the same result of that SQL query :
SELECT *
FROM resource
INNER JOIN user_resource_role
ON resource.id = user_resource_role.resource_id
WHERE user_resource_role.user_id = :a_user_id
The query generated by SQL Alchemy is exactly the same (except the verbosity) :
SELECT resource.id, resource.name, user_resource_role.user_id, user_resource_role.resource_id, user_resource_role.can_edit
FROM resource
JOIN user_resource_role
ON resource.id = user_resource_role.resource_id
WHERE user_resource_role.user_id = :user_id_1
If you try
for entry in results:
print(entry)
it will show you a list of tuples of (Resource, UserResourceRole). Apparently the call to .scalars().all() only keeps the first value of each tuple.
My current solution is to turn results into a list and manually manipulate it.
Remove scalars(). So, it should be
return results.all()
This will return a list of tuples
[(resource_1, userresourcerole_1),(resource_2, userresourcerole_2),...]
Using sqlalchemy 1.4.x, I've set up the following classes:
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import declarative_base, sessionmaker
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.orm import backref, relationship
from sqlalchemy import ForeignKey, Column, Integer, Unicode
from sqlalchemy import create_engine
engine = create_engine("sqlite:///:memory:", echo=True)
Session = sessionmaker(bind=engine)
session = Session()
Base = declarative_base()
class Infra(Base):
    """Infrastructure item; `protections` proxies dict-style access
    {protection name -> pscore} through the Infra_Protection association."""
    __tablename__ = "infra"
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(Unicode(200), index=True, unique=True)
    protections = association_proxy(
        "infra_protections",
        "pscore",
        # Dict assignment i.protections[k] = v builds a new association row —
        # and, transitively, a brand-new Protection(k) each time (the source
        # of the UNIQUE violation discussed below).
        creator=lambda k, v: Infra_Protection(protection=k, pscore=v),
    )
class Protection(Base):
    """Protection type; ptype is globally unique."""
    __tablename__ = "protection"
    id = Column(Integer, primary_key=True, autoincrement=True)
    ptype = Column(Unicode(200), index=True, unique=True)
    def __init__(self, protection):
        # Plain constructor — always creates a new row, never reuses an
        # existing ptype; see the UniqueObject recipe referenced below.
        self.ptype = protection
class Infra_Protection(Base):
    """Association object linking Infra to Protection with a score.

    Composite PK (infra_id, protection_id); Infra.infra_protections is a
    dict keyed by the proxied protection name.
    """
    __tablename__ = "infraprotection"
    infra_id = Column(
        Integer, ForeignKey("infra.id", ondelete="CASCADE"), primary_key=True
    )
    protection_id = Column(
        Integer, ForeignKey("protection.id", ondelete="CASCADE"), primary_key=True
    )
    prot = relationship("Protection")
    # Proxies self.prot.ptype; assigning a string here invokes
    # Protection.__init__ via the relationship's creator semantics.
    protection = association_proxy("prot", "ptype")
    infra = relationship(
        Infra,
        backref=backref(
            "infra_protections",
            collection_class=attribute_mapped_collection("protection"),
            cascade="all, delete-orphan",
        ),
    )
    pscore = Column(Integer, nullable=False, unique=False, server_default="0")
Now I'd like to add some Infra objects, and associated Protections:
Base.metadata.create_all(engine)
i = Infra(name="Foo")
i.protections["test"] = 1
i.protections["test 2"] = 2
session.add(i)
session.commit()
# now, add another
j = Infra(name="Bar")
j.protections["test"] = 3
j.protections["test 2"] = 4
session.add(j)
session.commit() # UNIQUE constraint failed: protection.ptype
It's obvious why the unique constraint is violated, but I'm wondering how I can modify my association proxy setup to avoid this in a reasonably robust way. Some sort of get_or_create on the Protection __init__?
The way to deal with this is by implementing one of the UniqueObject recipes from here: https://github.com/sqlalchemy/sqlalchemy/wiki/UniqueObject
I get the following error typeerror: object() takes no parameters on a mysql command which doesn't make sense to me. The error is due to dropping the username column from my models.py which I no longer require
Below is the user model definition in my models.py file, I want to drop the username column and get the above error when I remove it from the model
class User(db.Model):
    """Flask-SQLAlchemy user model; the question removes `username` from here
    and regenerates the migration below."""
    __tablename__ = 'user'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(32), index=True, unique=True)
    firstname = db.Column(db.String(128))
    lastname = db.Column(db.String(128))
    email = db.Column(db.String(120), index=True, unique=True)
Below is my db_migrate.py file which I used to update the db. I got this from Miguel's database tutorial
#!flask/bin/python
# sqlalchemy-migrate driver script (from Miguel Grinberg's Flask tutorial):
# diffs the live DB schema against db.metadata and writes/applies a
# numbered migration script.
import types
from migrate.versioning import api
from app import db
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' % (v+1))
# Capture the current DB schema as an importable "old model" module.
tmp_module = types.ModuleType('old_model')
old_model = api.create_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
exec(old_model, tmp_module.__dict__)
# Generate the upgrade script from old schema -> current model metadata.
script = api.make_update_script_for_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, tmp_module.meta, db.metadata)
open(migration, "wt").write(script)
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('New migration saved as ' + migration)
print('Current database version: ' + str(v))
This is the 005_migration.py file
from sqlalchemy import *
from migrate import *
from migrate.changeset import schema
pre_meta = MetaData()
post_meta = MetaData()
# Reflected "before" schema for the migration.
user = Table('user', pre_meta,
    # NOTE(review): this line is the source of the TypeError discussed below —
    # the generic sqlalchemy INTEGER takes no display_width argument; that
    # keyword belongs to sqlalchemy.dialects.mysql.INTEGER.
    Column('id', INTEGER(display_width=11), primary_key=True, nullable=False),
    Column('username', VARCHAR(length=32)),
    Column('email', VARCHAR(length=120)),
    Column('password_hash', VARCHAR(length=128)),
    Column('firstname', VARCHAR(length=128)),
    Column('lastname', VARCHAR(length=128)),
)
def upgrade(migrate_engine):
    """Drop the username column from user (sqlalchemy-migrate style)."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    pre_meta.bind = migrate_engine
    post_meta.bind = migrate_engine
    pre_meta.tables['user'].columns['username'].drop()
def downgrade(migrate_engine):
    """Re-create the username column (reverse of upgrade)."""
    # Operations to reverse the above upgrade go here.
    pre_meta.bind = migrate_engine
    post_meta.bind = migrate_engine
    pre_meta.tables['user'].columns['username'].create()
I think you are using the wrong data types for Column definition, here are the right ones:
from sqlalchemy import (MetaData, Table, Column, Integer, String)
# Suggested fix: generic (dialect-agnostic) column types, which accept
# no MySQL-only arguments like display_width.
user = Table('user', pre_meta,
    Column('id', Integer, primary_key=True, nullable=False),
    Column('username', String(32)),
    Column('email', String(120)),
    Column('firstname', String(128)),
    Column('lastname', String(128)),
)
For more details on SQLAlchemy Column and Data Types, check this link
The error is not due to removing a column, it originates from the line
Column('id', INTEGER(display_width=11), primary_key=True, nullable=False),
which the traceback clearly indicates.
INTEGER, which you import with from sqlalchemy import * takes no arguments, such as display_width. You should be using the dialect specific data type sqlalchemy.dialects.mysql.INTEGER.
The model.py looks like this:
import datetime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Numeric, ForeignKey, DateTime, Boolean
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, relationship
from configs import config_base as config
Base = declarative_base()
class User(Base):
    """Slack-style user belonging to a Team."""
    __tablename__ = 'user'
    id = Column(String, unique=True, primary_key=True)
    name = Column(String(100), nullable=False)
    team_id = Column(String, ForeignKey('team.id'))
    # NOTE(review): utcnow() is CALLED here, so the default is frozen at
    # class-definition time; should be default=datetime.datetime.utcnow
    # (no parentheses) to evaluate per insert.
    last_modified_on = Column(DateTime, default=datetime.datetime.utcnow())
    team = relationship('Team', back_populates='members')
class Team(Base):
    """Team/workspace; is_first_time_news is the column added by the migration below."""
    __tablename__ = 'team'
    id = Column(String, unique=True, primary_key=True)
    name = Column(String, nullable=False)
    bot_access_token = Column(String(100), nullable=False)
    bot_user_id = Column(String(100), nullable=False)
    # NOTE(review): same frozen-default issue as User.last_modified_on —
    # utcnow() is evaluated once at import, not per row.
    last_modified_on = Column(DateTime, default=datetime.datetime.utcnow())
    is_active = Column(Boolean, default=True)
    members = relationship('User', back_populates='team')
    is_first_time_news = Column(Boolean, default=True)
# Module-level side effects: connect, create any missing tables, and build
# the Session factory on import.
engine = create_engine(config.SQLALCHEMY_DATABASE_URI)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
I just added is_first_time_news via this alembic migration:
revision = '6f9e2d360276'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the is_first_time_news boolean to team."""
    op.add_column('team', sa.Column('is_first_time_news', sa.Boolean, default=False))
def downgrade():
    """Remove is_first_time_news again.

    NOTE(review): op.drop_column expects the column NAME as a string, not a
    sa.Column object — passing the object is what raises the
    "Neither 'Column' object nor 'Comparator' ..." AttributeError below.
    """
    op.drop_column('team', sa.Column('is_first_time_news', sa.Boolean))
alembic upgrade head works great.
But when I do a alembic downgrade -1 I get a strange exception:
AttributeError: Neither 'Column' object nor 'Comparator' object has an
attribute '_columns'
Are you using sqlite? Sqlite does not allow you to drop a column from the
scheme. I had a similar problem when I tried to downgrade a local sqlite database I was testing.
SQLite supports a limited subset of ALTER TABLE. The ALTER TABLE
command in SQLite allows the user to rename a table or to add a new
column to an existing table.
https://www.sqlite.org/lang_altertable.html
Try:
def downgrade():
    # Correct form: column is identified by name only.
    op.drop_column('team', 'is_first_time_news')
I have an Alchemy ORM object:
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class MyORM(Base):
    # NOTE(review): as quoted, this lacks __tablename__ (declarative would
    # refuse to map it) and Column/Integer/String imports are not shown.
    id = Column(Integer, primary_key=True)
    name = Column(String(128), unique=True, nullable=False)
When using alembic to create the table I do the following:
def upgrade():
    """Hand-written create_table mirroring MyORM.

    NOTE(review): the unique=True constraint on name from the model is not
    reproduced here — migrations and model can drift, which is the point
    of the answer that follows.
    """
    op.create_table(
        'myorm',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(128), nullable=False),
    )
Question: Is there a way to use the MyORM class to create the table? Something like this:
def upgrade():
op.create_table(
'myorm',
sa.BaseObject(MyORM)
)
This is exactly what Alembic migrations are trying to avoid. If you tie your migration to the current state of your model, it will not be a consistent upgrade path.
You can use declarative in your migrations to create tables and migrate data, but not to alter. You will have to re-create the definitions separate from the application definitions. This can be useful if you want to do a data migration and are more familiar with ORM queries instead of core queries.
Here is an example migration that creates Foo and Bar models with a many-to-many relationship using declarative, creates the tables, and inserts some data.
"""declarative
Revision ID: 169ad57156f0
Revises: 29b4c2bfce6d
Create Date: 2014-06-25 09:00:06.784170
"""
revision = '169ad57156f0'
down_revision = '29b4c2bfce6d'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
Session = sessionmaker()
Base = declarative_base()
class Foo(Base):
    """Migration-local declarative model (independent of the app's models)."""
    __tablename__ = 'foo'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String, nullable=False, unique=True)
class Bar(Base):
    """Migration-local model; many-to-many with Foo via foo_bar."""
    __tablename__ = 'bar'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String, nullable=False, unique=True)
    # lambda defers evaluation of foo_bar, which is defined after this class.
    foos = relationship(Foo, lambda: foo_bar, backref='bars')
# Plain association table for the Foo<->Bar many-to-many.
foo_bar = sa.Table(
    'foo_bar', Base.metadata,
    sa.Column('foo_id', sa.Integer, sa.ForeignKey('foo.id'), primary_key=True),
    sa.Column('bar_id', sa.Integer, sa.ForeignKey('bar.id'), primary_key=True)
)
def upgrade():
    """Create the declarative tables on the migration's connection, then seed data."""
    bind = op.get_bind()
    Base.metadata.create_all(bind=bind)
    session = Session(bind=bind)
    session._model_changes = False # if you are using Flask-SQLAlchemy, this works around a bug
    f1 = Foo(name='f1')
    f2 = Foo(name='f2')
    b1 = Bar(name='b1')
    b2 = Bar(name='b2')
    # Both directions of the relationship work thanks to the backref.
    f1.bars = [b1, b2]
    b2.foos.append(f2)
    session.add_all([f1, f2, b1, b2])
    session.commit()
def downgrade():
    """Illustrative downgrade: data deletion via the ORM.

    NOTE(review): as written this only deletes the 'b1' row — the
    drop_all call that would actually reverse upgrade() is commented out.
    """
    bind = op.get_bind()
    # in this case all we need to do is drop the tables
    # Base.metadata.drop_all(bind=bind)
    # but we could also delete data
    session = Session(bind=bind)
    session._model_changes = False # if you are using Flask-SQLAlchemy, this works around a bug
    b1 = session.query(Bar).filter_by(name='b1').one()
    session.delete(b1)
    session.commit()