How would you convert the following code to a Python ORM such as SQLAlchemy?
#1 Putting data into Postgres
import os, pg, sys, re, psycopg2
#conn = psycopg2.connect("dbname='tkk' host='localhost' port='5432' user='noa' password='123'")
conn = psycopg2.connect("dbname=tk user=naa password=123")
cur = conn.cursor()
cur.execute("""INSERT INTO courses (course_nro)
VALUES ( %(course_nro)s )""", dict(course_nro='abcd'))
conn.commit()
#2 Fetching
cur.execute("SELECT * FROM courses")
print cur.fetchall()
Examples of the two corresponding constructs in SQLAlchemy:
insert
sqlalchemy.sql.expression.insert(table, values=None, inline=False, **kwargs)
select
sqlalchemy.sql.expression.select(columns=None, whereclause=None, from_obj=[], **kwargs)
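For reference, a minimal Core-level sketch using those two constructs against the asker's courses table (a sketch only, assuming an older pre-2.0 SQLAlchemy to match the signatures above, and the same connection details as the question):
from sqlalchemy import create_engine, MetaData, Table
from sqlalchemy.sql import insert, select
engine = create_engine('postgresql://naa:123@localhost/tk')
metadata = MetaData(bind=engine)
courses = Table('courses', metadata, autoload=True)  # reflect the existing table
conn = engine.connect()
with conn.begin():
    conn.execute(insert(courses).values(course_nro='abcd'))  # INSERT INTO courses (course_nro) VALUES ('abcd')
print(conn.execute(select([courses])).fetchall())  # SELECT * FROM courses
conn.close()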
After the initial declarations, you can do something like this:
o = Course(course_nro='abcd')
session.add(o)
session.commit()
and
print session.query(Course).all()
The declarations could look something like this:
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# create an engine, and a base class
engine = create_engine('postgresql://naa:123@localhost/tk')
DeclarativeBase = declarative_base(bind=engine)
metadata = DeclarativeBase.metadata
# create a session
Session = sessionmaker(bind=engine)
session = Session()
# declare the models
class Course(DeclarativeBase):
    __tablename__ = 'courses'
    course_nro = Column('course_nro', CHAR(12), primary_key=True)  # a mapped class needs a primary key
This declarative method is just one way of using SQLAlchemy.
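For comparison, a rough sketch of the classical (non-declarative) mapping for the same table might look like the following (pre-2.0 style, using mapper(), with the same assumed schema):
from sqlalchemy import Table, Column, CHAR, MetaData
from sqlalchemy.orm import mapper
metadata = MetaData()
courses_table = Table('courses', metadata,
    Column('course_nro', CHAR(12), primary_key=True),
)
class Course(object):
    def __init__(self, course_nro):
        self.course_nro = course_nro
# map the plain class onto the table explicitly
mapper(Course, courses_table)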
Even though this is old, more examples can't hurt, right? I thought I'd demonstrate how to do this with PyORMish.
from pyormish import Model
class Course(Model):
    _TABLE_NAME = 'courses'
    _PRIMARY_FIELD = 'id' # or whatever your primary field is
    _SELECT_FIELDS = ('id', 'course_nro')
    _COMMIT_FIELDS = ('course_nro',)

Model.db_config = dict(
    DB_TYPE='postgres',
    DB_CONN_STRING='postgre://naa:123@localhost/tk'
)
To create:
new_course = Course().create(course_nro='abcd')
To select:
# return the first row WHERE course_nro='abcd'
new_course = Course().get_by_fields(course_nro='abcd')
Related
I have a simple database storing an attachment as a blob.
CREATE TABLE public.attachment
(
id integer NOT NULL,
attachdata oid,
CONSTRAINT attachment_pkey PRIMARY KEY (id)
)
-- Import a file
INSERT INTO attachment (id, attachdata) VALUES (1, lo_import('C:\temp\blob_import.txt'))
-- Export back as file.
SELECT lo_export(attachdata, 'C:\temp\blob_export_postgres.txt') FROM attachment WHERE id = 1
I'm able to read this file back using psycopg2 directly.
from psycopg2 import connect
con = connect(dbname="blobtest", user="postgres", password="postgres", host="localhost")
cur = con.cursor()
cur.execute("SELECT attachdata FROM attachment WHERE id = 1")
oid = cur.fetchone()[0]
obj = con.lobject(oid)
obj.export('C:\\temp\\blob_export_psycopg.txt')
When I try the same using SQLAlchemy, attachdata is a bytestring of zeros.
I've tested the following code with types like BLOB, LargeBinary and BINARY.
The size of the attachdata bytestring seems to be the OID's value.
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, Binary
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
Session = sessionmaker()
engine = create_engine('postgresql://postgres:postgres@localhost:5432/blobtest', echo=True)
Base.metadata.create_all(engine)
Session.configure(bind=engine)
class Attachment(Base):
    __tablename__ = "attachment"
    id = Column(Integer, primary_key=True)
    attachdata = Column(Binary)
session = Session()
attachment = session.query(Attachment).get(1)
with open('C:\\temp\\blob_export_sqlalchemy.txt', 'wb') as f:
    f.write(attachment.attachdata)
I've searched the SQLAlchemy documentation and various sources and couldn't find a solution for exporting the binary data using SQLAlchemy.
I had the same problem. There seems to be no way to get the large object data via the ORM: the OID column only stores the large object's identifier, while the data itself lives in pg_largeobject and has to be read through the lobject API. So I combined the ORM and the raw psycopg2 connection like this:
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.dialects.postgresql import OID
Base = declarative_base()
session_factory = sessionmaker()
engine = create_engine('postgresql+psycopg2://postgres:postgres@localhost:5432/postgres', echo=True)
session_factory.configure(bind=engine)
Session = scoped_session(session_factory)

class Attachment(Base):
    __tablename__ = "attachment"
    id = Column(Integer, primary_key=True)
    oid = Column(OID)

    @classmethod
    def insert_file(cls, filename):
        # write the file into a new large object via the raw psycopg2 connection
        conn = engine.raw_connection()
        l_obj = conn.lobject(0, 'wb', 0)
        with open(filename, 'rb') as f:
            l_obj.write(f.read())
        conn.commit()
        conn.close()
        # store only the large object's OID in the ORM-mapped row
        session = Session()
        attachment = cls(oid=l_obj.oid)
        session.add(attachment)
        session.commit()
        return attachment.id

    @classmethod
    def get_file(cls, attachment_id, filename):
        # look up the OID via the ORM, then stream the large object out to a file
        session = Session()
        attachment = session.query(Attachment).get(attachment_id)
        conn = engine.raw_connection()
        l_obj = conn.lobject(attachment.oid, 'rb')
        with open(filename, 'wb') as f:
            f.write(l_obj.read())
        conn.close()

Base.metadata.create_all(engine)

if __name__ == '__main__':
    my_id = Attachment.insert_file(r'C:\path\to\file')
    Attachment.get_file(my_id, r'C:\path\to\file_out')
Not very elegant but it seems to work.
Update:
I am using events now
from sqlalchemy import create_engine, event
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session, object_session
from sqlalchemy.dialects.postgresql import OID
Base = declarative_base()
session_factory = sessionmaker()
engine = create_engine('postgresql+psycopg2://postgres:postgres@localhost:5432/postgres', echo=True)
session_factory.configure(bind=engine)
Session = scoped_session(session_factory)

class Data(Base):
    __tablename__ = "attachment"
    id = Column(Integer, primary_key=True)
    oid = Column(OID)

Base.metadata.create_all(engine)

@event.listens_for(Data, 'after_delete')
def remove_large_object_after_delete(_, connection, target):
    # unlink the large object when its row is deleted
    raw_connection = connection.connection
    l_obj = raw_connection.lobject(target.oid, 'n')
    l_obj.unlink()
    raw_connection.commit()

@event.listens_for(Data, 'before_insert')
def add_large_object_before_insert(_, connection, target):
    # create a new large object, remember its OID on the row, and write the data
    raw_connection = connection.connection
    l_obj = raw_connection.lobject(0, 'wb', 0)
    target.oid = l_obj.oid
    l_obj.write(target.ldata)
    raw_connection.commit()

@event.listens_for(Data, 'load')
def inject_large_object_after_load(target, _):
    # read the large object back and attach it to the instance as .ldata
    session = object_session(target)
    conn = session.get_bind().raw_connection()
    l_obj = conn.lobject(target.oid, 'rb')
    target.ldata = l_obj.read()
if __name__ == '__main__':
    session = Session()
    # Put
    data = Data()
    data.ldata = 'your large data'
    session.add(data)
    session.commit()
    id = data.id
    # Get
    data2 = session.query(Data).get(id)
    print(data2.ldata) # Your large data is here
    # Delete
    session.delete(data)
    session.delete(data2)
    session.commit()
    session.flush()
    session.close()
Works well so far.
I don't understand why Postgres large objects get so neglected these days. I use them a ton, or at least I'd like to, but it's challenging, especially with asyncio....
I have a database that I don't have metadata or ORM classes for (the database already exists).
I managed to get the SELECT part working with:
from sqlalchemy.sql.expression import ColumnClause
from sqlalchemy.sql import table, column, select, update, insert
from sqlalchemy.ext.declarative import *
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
import pyodbc
db = create_engine('mssql+pyodbc://pytest')
Session = sessionmaker(bind=db)
session = Session()
columns = []
columns.append(column("field1"))
columns.append(column("field2"))
columns.append(column("field3"))
s = select(columns)
s.append_from('table')
s.append_whereclause("field1 = 'abc'")
s = s.limit(10)
result = session.execute(s)
out = result.fetchall()
print(out)
So far so good.
The only way I can get an update/insert working is by executing a raw query like:
session.execute(<Some sql>)
I would like to wrap that in a class so I can write something like:
u = Update("table")
u.Set("field1", "some value")
u.Where(<some condition>)
session.execute(u)
Tried (this is just one of the approaches I tried):
i = insert("table")
v = i.values([{"name":"name1"}, {"name":"name2"}])
u = update("table")
u = u.values({"name": "test1"})
I can't get that to execute on:
session.execute(i)
or
session.execute(u)
Any suggestions on how to construct an insert or update without writing ORM models?
As you can see from the SQLAlchemy Overview documentation, SQLAlchemy is built in two layers: ORM and Core. Currently you are using only some constructs of the Core and building everything manually.
In order to use the Core you should give SQLAlchemy some meta information about your database so it can operate on it. Assuming you have a table mytable with columns field1, field2, field3 and a defined primary key, the code below should perform all the tasks you need:
from sqlalchemy import MetaData, Table
from sqlalchemy.sql import select, update, insert
# define meta information; 'engine' is your existing Engine ('db' in the question)
metadata = MetaData(bind=engine)
mytable = Table('mytable', metadata, autoload=True)
# select
s = mytable.select() # or:
#s = select([mytable]) # or (if only certain columns):
#s = select([mytable.c.field1, mytable.c.field2, mytable.c.field3])
s = s.where(mytable.c.field1 == 'abc')
result = session.execute(s)
out = result.fetchall()
print(out)
# insert
i = insert(mytable)
i = i.values({"field1": "value1", "field2": "value2"})
session.execute(i)
# update
u = update(mytable)
u = u.values({"field3": "new_value"})
u = u.where(mytable.c.id == 33)
session.execute(u)
I'm trying to write the following SQL query with the SQLAlchemy ORM:
SELECT * FROM
(SELECT *, row_number() OVER(w)
FROM (select distinct on (grandma_id, author_id) * from contents) as c
WINDOW w AS (PARTITION BY grandma_id ORDER BY RANDOM())) AS v1
WHERE row_number <= 4;
This is what I've done so far:
s = Session()
unique_users_contents = (s.query(Content).distinct(Content.grandma_id,
Content.author_id)
.subquery())
windowed_contents = (s.query(Content,
func.row_number()
.over(partition_by=Content.grandma_id,
order_by=func.random()))
.select_from(unique_users_contents)).subquery()
contents = (s.query(Content).select_from(windowed_contents)
    .filter(row_number <= 4)) ## how can I reference the row_number() value?
result = contents
for content in result:
print "%s\t%s\t%s" % (content.id, content.grandma_id,
content.author_id)
As you can see it's pretty much modeled, but I have no idea how to reference the row_number() result of the subquery from the outer query's WHERE clause. I tried something like windowed_contents.c.row_number and adding a label() call on the window function, but it's not working; I couldn't find any similar example in the official docs or on Stack Overflow.
How can this be accomplished? And also, could you suggest a better way to do this query?
windowed_contents.c.row_number against a label() is how you'd do it, and it works for me (note the select_entity_from() method is new in SQLA 0.8.2 and will be needed here in 0.9 vs. select_from()):
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Content(Base):
    __tablename__ = 'contents'
    grandma_id = Column(Integer, primary_key=True)
    author_id = Column(Integer, primary_key=True)
s = Session()
unique_users_contents = s.query(Content).distinct(
Content.grandma_id, Content.author_id).\
subquery('c')
q = s.query(
Content,
func.row_number().over(
partition_by=Content.grandma_id,
order_by=func.random()).label("row_number")
).select_entity_from(unique_users_contents).subquery()
q = s.query(Content).select_entity_from(q).filter(q.c.row_number <= 4)
print q
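To actually execute it and iterate the mapped Content rows (a usage sketch, assuming the session is bound to an engine and the contents table has data):
for content in q:
    print content.grandma_id, content.author_id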
I am trying to obtain a row from the DB, modify that row and save it again, everything using SQLAlchemy.
My code:
from sqlalchemy import Column, DateTime, Integer, String, Table, MetaData
from sqlalchemy.orm import mapper
from sqlalchemy import create_engine, orm
metadata = MetaData()
product = Table('product', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(1024), nullable=False, unique=True),
)
class Product(object):
    def __init__(self, id, name):
        self.id = id
        self.name = name
mapper(Product, product)
db = create_engine('sqlite:////' + db_path)
sm = orm.sessionmaker(bind=db, autoflush=True, autocommit=True, expire_on_commit=True)
session = orm.scoped_session(sm)
result = session.execute("select * from product where id = :id", {'id': 1}, mapper=Product)
prod = result.fetchone() #there are many products in db so query is ok
prod.name = 'test' #<- here I got AttributeError: 'RowProxy' object has no attribute 'name'
session.add(prod)
session.flush()
Unfortunately it does not work, because I am trying to modify a RowProxy object. How can I do what I want (load, change and save/update a row) the SQLAlchemy ORM way?
I assume that your intention is to use the Object-Relational API.
To update a row in the db you need to load the mapped object from the table record and update the object's property.
Please see the code example below.
Note that I've added example code for creating a new mapped object and the first record in the table; there is also commented-out code at the end for deleting the record.
from sqlalchemy import Column, DateTime, Integer, String, Table, MetaData
from sqlalchemy.orm import mapper
from sqlalchemy import create_engine, orm
metadata = MetaData()
product = Table('product', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(1024), nullable=False, unique=True),
)
class Product(object):
    def __init__(self, id, name):
        self.id = id
        self.name = name
    def __repr__(self):
        return "%s(%r,%r)" % (self.__class__.__name__, self.id, self.name)
mapper(Product, product)
db = create_engine('sqlite:////temp/test123.db')
metadata.create_all(db)
sm = orm.sessionmaker(bind=db, autoflush=True, autocommit=True, expire_on_commit=True)
session = orm.scoped_session(sm)
#create new Product record:
if session.query(Product).filter(Product.id==1).count()==0:
    new_prod = Product("1","Product1")
    print "Creating new product: %r" % new_prod
    session.add(new_prod)
    session.flush()
else:
    print "product with id 1 already exists: %r" % session.query(Product).filter(Product.id==1).one()
print "loading Product with id=1"
prod = session.query(Product).filter(Product.id==1).one()
print "current name: %s" % prod.name
prod.name = "new name"
print prod
prod.name = 'test'
session.add(prod)
session.flush()
print prod
#session.delete(prod)
#session.flush()
PS: SQLAlchemy also provides a SQL Expression API that allows you to work with table records directly without creating mapped objects. In my practice we use the Object-Relational API in most applications; sometimes we use the SQL Expression API when we need to perform low-level db operations efficiently, such as inserting or updating thousands of records with one query. A quick sketch of that follows.
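For illustration, a minimal sketch of such a bulk operation with the SQL Expression API, reusing the product table defined above (the in-memory engine and the row values here are made up for the example):
from sqlalchemy import create_engine
bulk_engine = create_engine('sqlite:///:memory:')
metadata.create_all(bulk_engine)
with bulk_engine.begin() as conn:
    # executemany-style bulk insert: one statement, many parameter sets
    conn.execute(
        product.insert(),
        [{"id": i, "name": "Product%d" % i} for i in range(1, 1001)],
    )
    # a single UPDATE statement touching many rows at once
    conn.execute(
        product.update().where(product.c.id <= 500).values(name=product.c.name + ' (old)')
    )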
Direct links to SQLAlchemy documentation:
Object Relational Tutorial
SQL Expression Language Tutorial
table:
id(integer primary key)
data(blob)
I use MySQL and SQLAlchemy.
To insert data I use:
o = Demo()
o.data = mydata
session.add(o)
session.commit()
I would like to insert into the table like this:
INSERT INTO table(data) VALUES(COMPRESS(mydata))
How can I do this using SQLAlchemy?
You can assign a SQL function to the attribute:
from sqlalchemy import func
obj.data = func.compress(mydata)
session.add(obj)
session.commit()
Here's an example using a more DB-agnostic lower() function:
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class A(Base):
    __tablename__ = "a"
    id = Column(Integer, primary_key=True)
    data = Column(String)
e = create_engine('sqlite://', echo=True)
Base.metadata.create_all(e)
s = Session(e)
a1 = A()
a1.data = func.lower("SomeData")
s.add(a1)
s.commit()
assert a1.data == "somedata"
You can make it automatic with @validates:
from sqlalchemy.orm import validates
class MyClass(Base):
    # ...
    data = Column(BLOB)

    @validates("data")
    def _set_data(self, key, value):
        return func.compress(value)
If you want it readable in Python before the flush, you'd need to memoize it locally and use a descriptor to access it.
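A rough sketch of that idea (the table definition and the _plain_data attribute name are made up for illustration; the mapped column gets the func.compress() expression while a plain Python attribute keeps the readable copy):
from sqlalchemy import Column, Integer, BLOB, func

class MyClass(Base):
    __tablename__ = 'myclass'
    id = Column(Integer, primary_key=True)
    _data = Column("data", BLOB)  # what actually gets flushed (compressed server-side)

    @property
    def data(self):
        # prefer the locally memoized plain value; fall back to the stored (compressed) value
        return getattr(self, '_plain_data', self._data)

    @data.setter
    def data(self, value):
        self._plain_data = value           # readable copy, lives only in Python
        self._data = func.compress(value)  # SQL expression assigned to the mapped column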