invalid values accepted for insert despite ForeignKey [duplicate]

This question already has answers here:
Sqlite / SQLAlchemy: how to enforce Foreign Keys?
(9 answers)
Closed 5 years ago.
In the code below, why is there no error when inserting into table t1? Column b in t1 is a foreign key, so it should only accept values from column c in t2, yet somehow I can insert 'bar' without an error. What am I missing here?
from sqlalchemy import create_engine, MetaData, Table, Column, Unicode, ForeignKey

engine = create_engine(r'sqlite:///XXXXXXX.db', echo=True)
metadata = MetaData()

t1 = Table('t1', metadata,
           Column('a', Unicode(), primary_key=True),
           Column('b', Unicode(), ForeignKey('t2.c')))

t2 = Table('t2', metadata,
           Column('c', Unicode(), primary_key=True))

metadata.create_all(engine)

conn = engine.connect()
conn.execute(t2.insert().values(c='first'))
conn.execute(t2.insert().values(c='second'))
conn.execute(t1.insert().values(a='foo', b='bar'))
EDIT
I don't think this question should be marked as a duplicate of the linked one. The linked question is about how to enforce Foreign Key assuming you already know it is not on by default. In my question I observe a strange behaviour (violation of Foreign Key constraint) and ask for a root cause.

Thanks Ilja for a push in the right direction. I modified the code in the following way and now it works as expected, that is, it raises IntegrityError on the last line.
from sqlalchemy import create_engine, MetaData, Table, Column, Unicode, ForeignKey
from sqlalchemy import event
from sqlalchemy.engine import Engine

@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    # SQLite disables foreign key enforcement by default;
    # turn it on for every new connection.
    cursor = dbapi_connection.cursor()
    cursor.execute("PRAGMA foreign_keys=ON")
    cursor.close()

engine = create_engine(r'sqlite:///XXXXXXX.db', echo=True)
metadata = MetaData()

t1 = Table('t1', metadata,
           Column('a', Unicode(), primary_key=True),
           Column('b', Unicode(), ForeignKey('t2.c')))

t2 = Table('t2', metadata,
           Column('c', Unicode(), primary_key=True))

metadata.create_all(engine)

conn = engine.connect()
conn.execute(t2.insert().values(c='first'))
conn.execute(t2.insert().values(c='second'))
conn.execute(t1.insert().values(a='foo', b='bar'))  # now raises IntegrityError
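As a quick sanity check (not part of the original post), you can ask SQLite whether the pragma is actually active on a connection; a minimal sketch using the engine defined above:

from sqlalchemy import text

# Prints 1 once the "connect" listener is registered, 0 otherwise.
with engine.connect() as conn:
    print(conn.execute(text("PRAGMA foreign_keys")).scalar())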

Related

SQLAlchemy: ForeignKey across schemas

In my Postgres server we have a database named database with 2 schemas: public and api.
public has several tables, and I need to create a table in api with a foreign key to a table in public called model.
So it's:
-Schemas
--public
---tables
----models
--api
---tables
Using SQLAlchemy I have the following class:
from sqlalchemy import create_engine, MetaData, Table, Column, String, ForeignKey

class __PostgresService:
    def __init__(self):
        self.__client = create_engine("postgresql://postgres@localhost:5432/database")
        metadata = MetaData(self.__client, schema="public")
        self.__table = Table("training", metadata,
                             Column("id", String, primary_key=True, nullable=False),
                             Column("model_id", ForeignKey("model.id"), nullable=False),
                             schema="api")
        metadata.create_all()

postgres_service = __PostgresService()
postgres_service = __PostgresService()
However upon launch I receive the following error:
sqlalchemy.exc.NoReferencedTableError: Foreign key associated with column 'training.model_id' could not find table 'public.model' with which to generate a foreign key to target column 'id'
It seems it looks for the correct thing but can't find it? I'm very confused as to why this is happening, especially because the error refers to not finding "public", which is created by default by Postgres, rather than "api", which I created myself in pgAdmin.
Am I missing some crucial config?
The error you are getting means that you are trying to create a foreign key referencing a table that SQLAlchemy does not know about. You can tell SQLAlchemy about it by creating a Table, associated with the same MetaData, that describes the referenced table. You can also do this using SQLAlchemy's reflection capabilities. For example:
from sqlalchemy import create_engine, MetaData, Table, Column, String, ForeignKey

class __PostgresService:
    def __init__(self):
        self.__client = create_engine("postgresql://postgres@localhost:5432/database")
        metadata = MetaData(self.__client, schema="public")
        # Reflect the referenced table so the ForeignKey can resolve it
        metadata.reflect(schema="public", only=["model"])
        self.__table = Table("training", metadata,
                             Column("id", String, primary_key=True, nullable=False),
                             Column("model_id", ForeignKey("model.id"), nullable=False),
                             schema="api")
        metadata.create_all()

postgres_service = __PostgresService()
By default, MetaData.create_all() checks for the existence of tables before creating them, but you can also specify the exact tables to create: metadata.create_all(tables=[self.__table])
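For completeness, here is a minimal sketch of the first option mentioned above, declaring the referenced table explicitly instead of reflecting it. The Integer type of model.id is an assumption; only enough of the table to resolve the foreign key is needed:

from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String, ForeignKey

engine = create_engine("postgresql://postgres@localhost:5432/database")
metadata = MetaData(schema="public")

# Describe just enough of the existing "model" table for the ForeignKey to resolve.
model = Table("model", metadata,
              Column("id", Integer, primary_key=True))

training = Table("training", metadata,
                 Column("id", String, primary_key=True, nullable=False),
                 Column("model_id", ForeignKey("model.id"), nullable=False),
                 schema="api")

# Only create the new table; "model" already exists on the server.
metadata.create_all(engine, tables=[training])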

SQLAlchemy 2.0 NotImplementedError: engine.execute

I am getting NotImplementedError: This method is not implemented for SQLAlchemy 2.0. when trying to delete rows from a table with the delete method in SQLAlchemy v1.4.15.
from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, delete

engine = create_engine("sqlite+pysqlite:///:memory:", echo=True, future=True)
metadata = MetaData()

user = Table(
    "users", metadata, Column("id", Integer, primary_key=True), Column("name", String)
)

metadata.create_all(engine)

engine.execute(user.delete())  # leading to Traceback
# NotImplementedError: This method is not implemented for SQLAlchemy 2.0.
engine.execute() is deprecated in SQLAlchemy 1.4 and removed in SQLAlchemy 2.0; since you created the engine with future=True, the 2.0 behaviour is already enforced. You need to call a connection's execute() method instead:
from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, delete

# Setting future=True enforces 2.0 behaviour and disables
# legacy features.
engine = create_engine("sqlite+pysqlite:///:memory:", echo=True, future=True)
metadata = MetaData()

user = Table(
    "users", metadata, Column("id", Integer, primary_key=True), Column("name", String)
)

metadata.create_all(engine)

stmt = delete(user)
with engine.connect() as conn:
    with conn.begin():  # Optional: start a transaction
        conn.execute(stmt)
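As a side note (not part of the original answer), the connect/begin pair can be collapsed into engine.begin(), which opens a connection inside a transaction and commits it automatically on success:

# Equivalent shorthand using the same stmt as above
with engine.begin() as conn:
    conn.execute(stmt)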

SQLite with SQLAlchemy: CREATE TABLE results in database is locked

While trying to figure out why my tests fail to create the sqlite schema, I tried this very simple example taken from https://pysheeet.readthedocs.io/en/latest/notes/python-sqlalchemy.html
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import Column
from sqlalchemy import Integer, String

db_uri = 'sqlite:///db.sqlite'
engine = create_engine(db_uri)

# Create a metadata instance
metadata = MetaData(engine)

# Declare a table
table = Table('Example', metadata,
              Column('id', Integer, primary_key=True),
              Column('name', String))

# Create all tables
metadata.create_all()

for _t in metadata.tables:
    print("Table: ", _t)
But still I get this db locked message:
sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) database is locked [SQL: '\nCREATE TABLE "Example" (\n\tid INTEGER NOT NULL, \n\tname VARCHAR, \n\tPRIMARY KEY (id)\n)\n\n'] (Background on this error at: http://sqlalche.me/e/e3q8)
Any idea how to use create_all with a non 'in memory' sqlite db?
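The thread above has no answer here; one common cause of sqlite3's "database is locked" (an assumption, not confirmed in the post) is another connection still holding a write lock on db.sqlite, for example a DB browser or an unclosed connection from an earlier test run. A minimal sketch that gives SQLite more time to wait for a competing lock via the standard sqlite3 timeout parameter and releases the file handle when done:

from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String

# timeout is passed through to sqlite3.connect(); the connection waits
# up to 30 seconds for a competing lock to clear instead of failing immediately.
engine = create_engine('sqlite:///db.sqlite', connect_args={'timeout': 30})

metadata = MetaData()
table = Table('Example', metadata,
              Column('id', Integer, primary_key=True),
              Column('name', String))

metadata.create_all(engine)
engine.dispose()  # release the file handle when done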

SQLAlchemy: Update table structure

I created the table on the server using SQLAlchemy:
from sqlalchemy import create_engine, Table, Column, Integer, String, MetaData

engine = create_engine('mssql://server/database?driver=SQL+Server&trusted_connection=yes')
meta = MetaData()

table = Table('test17', meta,
              Column('id', Integer, primary_key=True),
              Column('name', String(255))
              )

meta.create_all(engine)
Then I connected to this database using SSMS 2012 and added a new column:
ALTER TABLE test17 ADD age INT NULL
How can I tell, using SQLAlchemy, that a new column has appeared in the table?
I tried to do something like:
meta2 = MetaData()
table = Table('test17', meta, autoload=True, autoload_with=engine)
But in the end I get the same table structure that I defined initially using SQLAlchemy.
I think you forgot to bind your MetaData to the engine. Create the engine first, then read the metadata from the db using the engine.
import sqlalchemy as db

metadata = db.MetaData(bind=engine)
test17 = db.Table('test17', metadata, autoload=True)
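If you are on SQLAlchemy 1.4 or newer (an assumption; the question does not state a version), the bound-MetaData style above is deprecated, and the equivalent reflection passes the engine to the Table directly:

from sqlalchemy import MetaData, Table

meta2 = MetaData()
# autoload_with replaces autoload=True plus a bound MetaData
test17 = Table('test17', meta2, autoload_with=engine)
print(test17.columns.keys())  # should now include the new "age" column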

Get existing table using SQLAlchemy MetaData

I have a table that already exists:
USERS_TABLE = Table("users", META_DATA,
                    Column("id", Integer, Sequence("user_id_seq"), primary_key=True),
                    Column("first_name", String(255)),
                    Column("last_name", String(255))
                    )
I created this table by running this:
CONN = create_engine(DB_URL, client_encoding="UTF-8")
META_DATA = MetaData(bind=CONN, reflect=True)
# ... table code
META_DATA.create_all(CONN, checkfirst=True)
The first time it worked and I was able to create the table. However, the second time around I got this error:
sqlalchemy.exc.InvalidRequestError: Table 'users' is already defined for this MetaData instance. Specify 'extend_existing=True' to redefine options and columns on an existing Table object.
which makes sense since the table users already exists. I'm able to see if the table exists like so:
TABLE_EXISTS = CONN.dialect.has_table(CONN, "users")
However, how do I actually get the existing table object? I can't find this anywhere in the documentation. Please help.
We have 3 different approaches here:
1. Assume the required tables have already been created; reflect them and fetch them from the MetaData.tables dictionary:
from sqlalchemy import MetaData, create_engine

CONN = create_engine(DB_URL, client_encoding="UTF-8")
META_DATA = MetaData(bind=CONN, reflect=True)
USERS_TABLE = META_DATA.tables['users']
2. Remove the reflect flag from the MetaData initialization, since we don't use the reflected tables and, moreover, with it we end up redefining tables that have already been reflected:
from sqlalchemy import (MetaData, Table, Column, Integer, String, Sequence,
                        create_engine)

CONN = create_engine('sqlite:///db.sql')
META_DATA = MetaData(bind=CONN)
USERS_TABLE = Table("users", META_DATA,
                    Column("id", Integer, Sequence("user_id_seq"),
                           primary_key=True),
                    Column("first_name", String(255)),
                    Column("last_name", String(255)))
META_DATA.create_all(CONN, checkfirst=True)
3. Keep the reflected table, if it was previously created, by setting the keep_existing flag to True in the Table initializer:
from sqlalchemy import (MetaData, Table, Column, Integer, String, Sequence,
                        create_engine)

CONN = create_engine('sqlite:///db.sql')
META_DATA = MetaData(bind=CONN, reflect=True)
USERS_TABLE = Table("users", META_DATA,
                    Column("id", Integer, Sequence("user_id_seq"),
                           primary_key=True),
                    Column("first_name", String(255)),
                    Column("last_name", String(255)),
                    keep_existing=True)
META_DATA.create_all(CONN, checkfirst=True)
Which one to choose? It depends on your use case, but I prefer the second one, since it looks like you aren't using reflection; it is also the simplest modification: just remove the flag from the MetaData initializer.
P. S.
We can always perform reflection after initializing the MetaData object with the MetaData.reflect method:
META_DATA.reflect()
We can also specify which tables to reflect with the only parameter (any iterable of str objects):
META_DATA.reflect(only=['users'])
and many more.
This works pretty well for me:
import sqlalchemy as db

engine = db.create_engine("your_connection_string")
meta_data = db.MetaData(bind=engine)
db.MetaData.reflect(meta_data)

USERS = meta_data.tables['users']

# View the columns present in the users table
print(USERS.columns)

# You can run sqlalchemy queries
query = db.select([
    USERS.c.id,
    USERS.c.first_name,
    USERS.c.last_name,
])
result = engine.execute(query).fetchall()
Note that the reflect parameter in MetaData(bind=engine, reflect=True) is deprecated and will be removed in a future release. The code above takes care of that.
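Under SQLAlchemy 2.0 (an assumption; the answer above targets 1.4-style APIs) the bind argument to MetaData is gone as well, so the same idea looks roughly like this:

import sqlalchemy as db

engine = db.create_engine("your_connection_string")
meta_data = db.MetaData()
meta_data.reflect(bind=engine)  # pass the engine to reflect() instead of binding

USERS = meta_data.tables['users']

# 2.0-style select takes columns as positional arguments, not a list
query = db.select(USERS.c.id, USERS.c.first_name, USERS.c.last_name)
with engine.connect() as conn:
    result = conn.execute(query).fetchall()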
Add __table_args__ = {'extend_existing': True} right below __tablename__.
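That tip applies to the ORM declarative style; a minimal sketch (the class and column names are illustrative assumptions):

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    # Lets this mapping reuse/extend a 'users' Table that is already
    # present in the MetaData (e.g. one reflected earlier).
    __table_args__ = {'extend_existing': True}

    id = Column(Integer, primary_key=True)
    first_name = Column(String(255))
    last_name = Column(String(255))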
If you're using async SQLAlchemy, you can use:

metadata = MetaData()

async with engine.connect() as conn:
    await conn.run_sync(metadata.reflect, only=["harshit_table"])

# The reflected table is now available from the MetaData
harshit_table = metadata.tables["harshit_table"]
print("tables: ", harshit_table, type(harshit_table))
I'm quite new to this, but what worked for me was this (variables are declared in the original question)
USERS_TABLE_NEW = Table("users", META_DATA, autoload_with=CONN)
