How to share a DB connection instance with other classes - Python

I have 3 classes in my project. One is a DB class that holds the database connection for the project; it looks like this:
class DB:
    def __init__(self):
        self.createConnection()

    def createConnection(self):
        db = DataBase(
            [config.endpoint],
            http_auth=(config.username, config.password),
            scheme="https"
        )
        self.__db = db
Now I want to access and use self.__db in other classes. How can I do that? My second class is a helper class:
import db

class Helper:
    def connection(self):
        db = db.createConnection()
        self.__db = db
        print(self.__db)
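One way to make this work (a sketch, not the only option) is to have the DB class hand out its connection through a method and to share a single DB instance between the classes that need it. The getConnection method and the module-level database object below are hypothetical names; DataBase and config are taken from the question:

# db.py
class DB:
    def __init__(self):
        self.__db = None

    def getConnection(self):
        # create the connection lazily, the first time it is asked for
        if self.__db is None:
            self.__db = DataBase(
                [config.endpoint],
                http_auth=(config.username, config.password),
                scheme="https"
            )
        return self.__db

# one shared instance that other modules import
database = DB()

# helper.py
from db import database

class Helper:
    def connection(self):
        self.__db = database.getConnection()
        print(self.__db)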

Related

What is the best way to populate a table in SQLAlchemy?

I'm trying to create a list of possible roles of a web app user. If I define the roles table in this way:
roles = db.Table(
    "roles",
    db.Model.metadata,
    db.Column("role_id", db.Integer, db.ForeignKey("task.id"), primary_key=True),
    db.Column("name", db.String(32)),
)
What is the best method to populate it if I intend to only do that once (on database creation), and then never add any more rows to it?
I believe this paradigm is called "database seeding"; knowing the term might help when you are googling for answers.
I had a look online and found this:
https://pypi.org/project/Flask-Seeder/
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_seeder import FlaskSeeder

def create_app():
    app = Flask(__name__)

    db = SQLAlchemy()
    db.init_app(app)

    seeder = FlaskSeeder()
    seeder.init_app(app, db)

    return app
Then you can create another file with your seeds.
from flask_seeder import Seeder, Faker, generator

# SQLAlchemy database model ("Base" here is the declarative base defined elsewhere in the app)
class User(Base):
    def __init__(self, id_num=None, name=None, age=None):
        self.id_num = id_num
        self.name = name
        self.age = age

    def __str__(self):
        return "ID=%d, Name=%s, Age=%d" % (self.id_num, self.name, self.age)

# All seeders inherit from Seeder
class DemoSeeder(Seeder):

    # run() will be called by Flask-Seeder
    def run(self):
        # Create a new Faker and tell it how to create User objects
        faker = Faker(
            cls=User,
            init={
                "id_num": generator.Sequence(),
                "name": generator.Name(),
                "age": generator.Integer(start=20, end=100)
            }
        )

        # Create 5 users
        for user in faker.create(5):
            print("Adding user: %s" % user)
            self.db.session.add(user)
And finally, you can call
$ flask seed run
to populate the database.
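If the table only ever needs to be filled once, right after the schema is created, a plain insert works too. The sketch below assumes the Flask-SQLAlchemy db object and the roles table from the question; the module layout and role names are made-up examples:

# seed_roles.py -- one-off seeding sketch
from app import app, db, roles  # hypothetical module layout

ROLE_NAMES = ["admin", "editor", "viewer"]  # example data

with app.app_context():
    db.create_all()
    # only insert when the table is still empty, so re-running is harmless
    if db.session.execute(roles.select()).first() is None:
        # note: role_id is a foreign key to task.id in the question's schema,
        # so matching task rows must already exist for these values
        db.session.execute(
            roles.insert(),
            [{"role_id": i, "name": name} for i, name in enumerate(ROLE_NAMES, start=1)],
        )
        db.session.commit()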

PonyORM Database Table Dynamic Definition

As far as I know from the research I've done, the typical way of defining a table using PonyORM in Python is the following:
from pony.orm import *

db = Database()
# Database connection ...

class SampleTable(db.Entity):
    sample_int_field = Required(int)
    sample_string_field = Required(str)
    # ...

db.generate_mapping(create_tables=True)
My problem: this uses db.Entity.
I wish to define a table without using the specific Database instance, in an abstract, general manner, and connect it to the instance when I need to.
Is there a way to do so?
Concept (presumably not real, runnable code):
# SampleAbstractTable.py
from pony.orm import *

class SampleAbstractTable(Database):
    sample_int_field = Required(int)
    sample_string_field = Required(str)
    # ...

# main.py
from pony.orm import *
import SampleAbstractTable

db = Database()
# Database connection ...
db.connectTables((SampleAbstractTable.SampleAbstractTable, ...))
db.generate_mapping(create_tables=True)
EDIT:
One idea I have is to create a wrapper class for the database I wish to use with a certain group of tables, and define the tables in __init__. The whole point of defining tables dynamically is to separate the creation of the Database instance from the definitions of the table classes, namely:
from pony.orm import *

class sampleDatabase:
    def __init__(self):
        self._db = Database()
        # Database connection ...

        class TableA(self._db.Entity):
            # ...
            pass

        class TableB(self._db.Entity):
            # ...
            pass

        self._db.generate_mapping(create_tables=True)
But then I have issues accessing the database tables...
First of all you're working with Entities, not Tables. They're not the same thing.
Your problem can be solved by defining a factory-like function:
def define_entities(db):
    class Entity1(db.Entity):
        attr1 = Required(str)
        # ... and so on
And then later when you create your Database instance you just call
db = Database(...)
define_entities(db)
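Putting the pieces together, a minimal sketch might look like the following; the entity name, fields, and the in-memory SQLite binding are all placeholders:

# entities.py
from pony.orm import Required

def define_entities(db):
    # attach entity definitions to whatever Database instance is passed in
    class Person(db.Entity):
        name = Required(str)
        age = Required(int)

# main.py
from pony.orm import Database, db_session
from entities import define_entities

db = Database()
define_entities(db)                               # define entities before mapping
db.bind(provider="sqlite", filename=":memory:")   # example binding
db.generate_mapping(create_tables=True)

with db_session:
    db.Person(name="Alice", age=30)               # entities are reachable as attributes of db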

In Tornado, can I update the database name in a request handler dynamically?

Can I change the database name defined in my Application class, like below? Or what is the right approach to changing the database name dynamically in Tornado?
class Application(tornado.web.Application):
    def __init__(self):
        self.db = "test"
In one of my request handlers, using a value from the arguments:
class MainHandler(tornado.web.RequestHandler):
    def initialize(self, database):
        self.database = database
        self.db = "new_test"
If by "dynamically" you mean you can modify it for different handlers, you can pass it to your URLSpec:
from tornado.web import url
from myhandlers import MyHandler

urls_list = [
    url('/foo/bar', MyHandler, kwargs={'database': my_database}),
]

app = Application(urls_list)
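On the handler side, the kwargs from the URLSpec arrive in initialize(), so a matching handler could look like this (a sketch; my_database stands in for whatever connection object you pass):

# myhandlers.py
import tornado.web

class MyHandler(tornado.web.RequestHandler):
    def initialize(self, database):
        # receives the 'database' entry from the URLSpec kwargs
        self.database = database

    def get(self):
        self.write("using database: %s" % self.database)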

How to Generate Fixtures from Database with SqlAlchemy

I'm starting to write tests with Flask-SQLAlchemy, and I'd like to add some fixtures for them. I have plenty of good data in my development database and a lot of tables, so writing the data manually would get annoying. I'd really like to just sample data from the dev database into fixtures and then use those. What's a good way to do this?
I would use factory_boy.
To create a model factory you just do:
import factory
from . import models

class UserFactory(factory.Factory):
    class Meta:
        model = models.User

    first_name = 'John'
    last_name = 'Doe'
    admin = False
Then, to create instances:
UserFactory.create()
To add static data, just pass it as a keyword argument to create():
UserFactory.create(name='hank')
So to seed a bunch of data, throw that in a for loop. :)
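For example, a loop that seeds a handful of users could look like this (the first_name values are just made-up sample data):

# seed ten users, overriding the default first_name for each one
for i in range(10):
    UserFactory.create(first_name="user%d" % i)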
If you need to handle fixtures with SQLAlchemy or another ORM/backend, the Flask-Fixtures package (0.3.3) may be of use.
It is a simple library that lets you add database fixtures for your unit tests using nothing but JSON or YAML.
While Kyle's answer is correct, we still need to provide the model factory with a database session; otherwise we would never actually commit to the db. Also, factory_boy has a dedicated class, SQLAlchemyModelFactory, for interacting with SQLAlchemy.
https://factoryboy.readthedocs.io/en/stable/orms.html#sqlalchemy
The whole setup could look something like this:
import os

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from factory.alchemy import SQLAlchemyModelFactory

# Base and models are assumed to come from your application's model definitions
engine = create_engine(os.getenv("SQLALCHEMY_DATABASE_URI"))
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# this resets our tables in between each test
def _reset_schema():
    db = SessionLocal()
    for table in Base.metadata.sorted_tables:
        db.execute(
            'TRUNCATE {name} RESTART IDENTITY CASCADE;'.format(name=table.name)
        )
    db.commit()

@pytest.fixture
def test_db():
    yield engine
    engine.dispose()
    _reset_schema()

@pytest.fixture
def session(test_db):
    connection = test_db.connect()
    transaction = connection.begin()
    db = scoped_session(sessionmaker(bind=engine))
    try:
        yield db
    finally:
        db.close()
        transaction.rollback()
        connection.close()
        db.remove()

class UserFactory(SQLAlchemyModelFactory):
    class Meta:
        model = models.User

    first_name = 'John'
    last_name = 'Doe'
    admin = False

@pytest.fixture(autouse=True)
def provide_session_to_factories(session):
    # usually you'd have one factory for each db table
    for factory in [UserFactory, ...]:
        factory._meta.sqlalchemy_session = session
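A test can then use the session fixture and the factory together; a short sketch, assuming models.User has the fields shown above:

def test_user_defaults(session):
    user = UserFactory.create()
    session.commit()

    stored = session.query(models.User).one()
    assert stored.first_name == "John"
    assert stored.admin is False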

Create a global object in a module shared across imports in Python

I have a database class in my module, db.py, that I can only call once; my design goal is to have one single database object and have it used across all other modules. So db.py has the layout:
# db.py
class DatabaseManager:
    def __init__(self):
        # initialize engine and database
        ...

db = DatabaseManager()
The problem is that across multiple imports db is reinitialized each time. What I want to be able to do is something along the lines of:
# polygon.py
from db import db

class Polygon:
    def something(self):
        db.commitChange(...)

# main.py
class GUIWindow:
    def something(self):
        db.getJSON(...)
How can I create one object for the entire program, and have all other modules that import db use that one object? I was under the impression that db would not be reinitialized, but I am receiving the engine initialization output twice. Here's an example and my output:
# db.py
class DatabaseManager(object):
    '''
    classdocs
    '''

    def __init__(self):
        '''
        Constructor
        '''
        print "hi"

db = DatabaseManager()
# polygon.py
from db import db
# main.py
from db import db
output:
hi
hi
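For what it's worth, Python caches modules in sys.modules, so the top level of db.py normally runs only once per interpreter. Seeing "hi" twice usually means the module is being imported under two different names, for example running db.py directly as __main__ while another module also imports it, or mixing package-relative and absolute imports. As long as every module imports it the same way, the module-level object is already a singleton; a lazy accessor like the sketch below (get_db is a hypothetical helper) just makes the creation explicit and defers it until first use:

# db.py
class DatabaseManager(object):
    def __init__(self):
        print("hi")  # engine and database initialization would go here

_db = None

def get_db():
    # created on the first call; every later call returns the same object
    global _db
    if _db is None:
        _db = DatabaseManager()
    return _db

# polygon.py (and likewise main.py)
from db import get_db

db = get_db()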
