I'm having trouble creating a global function accessible from within all classes. I receive an error from within user.py that says:
NameError: global name 'connectCentral' is not defined
Here is my current code.
project/model/__init__.py:
"""The application's model objects"""
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy import engine_from_config
from pylons import config
import central
#Establish an on-demand connection to the central database
def connectCentral():
    engine = engine_from_config(config, 'sqlalchemy.central.')
    central.engine = engine
    central.Session.configure(bind=engine)
project/model/user.py
import project.model

class User(object):
    def emailExists(self):
        try:
            connectCentral()
            emails = central.Session.query(User).filter_by(email=self.email).count()
            if emails > 0:
                return False
            else:
                return True
        except NameError:
            self.errors['email'] = 'E-Mail not set'
            return False
Am I missing an import? Is there a better way to do this?
Thanks.
You need to qualify the name with the module (or package) it's in, so:
try:
    project.model.connectCentral()
etc.
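For example, a minimal sketch of user.py with the qualified calls (assuming the central module ends up importable as project.model.central, as set up in the package __init__ above):

import project.model

class User(object):
    def emailExists(self):
        # Qualify both the helper and the central module through the package
        project.model.connectCentral()
        emails = project.model.central.Session.query(User).filter_by(email=self.email).count()
        # True means the address is not taken yet
        return emails == 0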
I want to build a well-modularized Python project, where all alternative modules are registered and accessed via a function named xxx_builder.
Taking data class as an example:
register.py:
import logging

logger = logging.getLogger(__name__)

def register(key, module, module_dict):
    """Register and maintain the data classes."""
    if key in module_dict:
        logger.warning(
            'Key {} is already pre-defined, overwritten.'.format(key))
    module_dict[key] = module

data_dict = {}

def register_data(key, module):
    register(key, module, data_dict)
data.py:
from register import register_data
import ABCDEF

class MyData:
    """An alternative data class"""
    pass

def call_my_data(data_type):
    if data_type == 'mydata':
        return MyData

register_data('mydata', call_my_data)
builder.py:
import register

def get_data(type):
    """Obtain the corresponding data class"""
    for func in register.data_dict.values():
        data = func(type)
        if data is not None:
            return data
main.py:
from data import MyData
from builder import get_data

if __name__ == '__main__':
    data_type = 'mydata'
    data = get_data(type=data_type)
My problem
In main.py, to register the MyData class into register.data_dict before calling get_data, I need to import data.py in advance so that register_data('mydata', call_my_data) is executed.
That is okay while the project is small and all the data-related classes follow some placement rule (e.g. every data-related class lives under the data directory), so that I can import them in advance.
However, this registration mechanism means that every data-related class gets imported, so I need to install all of their packages even if I never actually use them. For example, when data_type in main.py is not 'mydata', I still need to install the ABCDEF package just because MyData imports it.
So is there a good way to avoid importing all the packages?
Python's packaging tools come with a solution for this: entry points. There's even a tutorial about using entry points for plugins (which seems to be what you're doing), in conjunction with the Setuptools tutorial.
IOW, something like this (nb. untested): if you have a plugin package that has defined

[options.entry_points]
myapp.data_class =
    someplugindata = my_plugin.data_classes:SomePluginData

in setup.cfg (or pyproject.toml or setup.py, with their respective syntaxes), you could register all of these plugin classes (shown here alongside a locally registered class too).
from importlib.metadata import entry_points

data_class_registry = {}

def register(key):
    def decorator(func):
        data_class_registry[key] = func
        return func
    return decorator

@register("mydata")
class MyData:
    ...

def register_from_entrypoints():
    for entrypoint in entry_points(group="myapp.data_class"):
        register(entrypoint.name)(entrypoint.load())

def get_constructor(type):
    return data_class_registry[type]

def main():
    register_from_entrypoints()
    get_constructor("mydata")(...)
    get_constructor("someplugindata")(...)
Looking at the docs, I ended up using my app settings this way:
import config
...

@router.post('')
async def my_handler(
    ...
    settings: config.SettingsCommon = fastapi.Depends(config.get_settings),
):
    ...
But I am not satisfied with repeating import config and config.get_settings everywhere.
Is there a way to use settings in my handlers without repeating myself?
Because FastAPI cares about helping you minimize code repetition, you can use Class Based Views from the fastapi_utils package.
As an example:
from fastapi import APIRouter, Depends, FastAPI
from fastapi_utils.cbv import cbv
from starlette import requests
from logging import Logger

import config
from auth import my_auth

router = APIRouter(
    tags=['Settings test'],
    dependencies=[Depends(my_auth)]  # injected into each query; my_auth's return value is ignored, but it can raise exceptions
)

@cbv(router)
class MyQueryCBV:
    settings: config.SettingsCommon = Depends(config.get_settings)  # you can get settings here

    def __init__(self, r: requests.Request):  # called for each query, after the dependencies have been evaluated
        self.logger: Logger = self.settings.logger
        self.logger.warning(str(r.headers))

    @router.get("/cbv/{test}")
    def test_cbv(self, test: str):
        self.logger.warning(f"test_cbv: {test}")
        return "test_cbv"

    @router.get("/cbv2")
    def test_cbv2(self):
        self.logger.warning("test_cbv2")
        return "test_cbv2"
It's not currently possible to inject global dependencies. You can still declare them and the code inside the dependencies will run as normal.
Docs on global dependencies for reference.
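For reference, declaring a global dependency on the app looks roughly like this (a minimal sketch; verify_token is a made-up dependency):

from fastapi import Depends, FastAPI, Header, HTTPException

async def verify_token(x_token: str = Header(...)):
    # Hypothetical check: runs for every request, but its return value
    # is not injected into the individual path operations.
    if x_token != "expected-token":
        raise HTTPException(status_code=400, detail="Invalid token")

app = FastAPI(dependencies=[Depends(verify_token)])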
Without any external dependency, I can think of three ways of using global dependencies. You can set a private variable with your dependency and get that dependency using a function.
You can also use the same approach without a global private variable, by instead using a cache decorator (docs here); a sketch follows the singleton example below.
Finally, you can implement the singleton pattern if using a class as a dependency.
Something like:
class Animal:
    _singleton = None

    @classmethod
    def singleton(cls) -> "Animal":
        if cls._singleton is None:
            cls._singleton = Animal()
        return cls._singleton
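For the cache-decorator route mentioned above, a minimal sketch using functools.lru_cache (the Settings class and the route are made up for illustration; in practice the settings object would typically be a pydantic settings class):

from functools import lru_cache

import fastapi

class Settings:
    # Hypothetical settings object
    app_name: str = "my-app"

@lru_cache()
def get_settings() -> Settings:
    # Built once on the first call; later calls return the cached instance,
    # so every handler shares one "global" settings object.
    return Settings()

router = fastapi.APIRouter()

@router.get("/name")
async def read_name(settings: Settings = fastapi.Depends(get_settings)):
    return {"app_name": settings.app_name}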
I have a file called redis_db.py which has code to connect to redis
import os
import redis
import sys

class Database:
    def __init__(self, zset_name):
        redis_host = os.environ.get('REDIS_HOST', '127.0.0.1')
        redis_port = os.environ.get('REDIS_PORT', 6379)
        self.db = redis.StrictRedis(host=redis_host, port=redis_port)
        self.zset_name = zset_name

    def add(self, key):
        try:
            self.db.zadd(self.zset_name, {key: 0})
        except redis.exceptions.ConnectionError:
            print("Unable to connect to redis host.")
            sys.exit(0)
I have another file called app.py which is like this
from flask import Flask
from redis_db import Database

app = Flask(__name__)
db = Database('zset')

@app.route('/add_word/word=<word>')
def add_word(word):
    db.add(word)
    return "{} added".format(word)

if __name__ == '__main__':
    app.run(host='0.0.0.0', port='8080')
Now I am writing a unit test for the add_word function like this:
import unittest
import sys
import os
from unittest import mock

sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../api/")
from api import app  # noqa: E402

class Testing(unittest.TestCase):
    def test_add_word(self):
        with mock.patch('app.Database') as mockdb:
            mockdb.return_value.add.return_value = ""
            result = app.add_word('shivam')
            self.assertEqual(result, 'shivam word added.')
The issue I am facing is that even though I am mocking the Database call, it still calls the actual method in the class instead of returning the mocked value, and during testing I get an error with the message Unable to connect to redis host..
Can someone please help me figure out how I can mock the Redis database calls?
I am using the unittest module.
The issue is that db is created at module import time, so mock.patch('app.Database') does not affect the already-created db variable. Either move the instantiation of db into the add_word(word) function, or patch db instead of Database, e.g.
def test_add_word(self):
    with mock.patch('api.app.db') as mockdb:
        mockdb.add = mock.MagicMock(return_value="your desired return value")
        result = app.add_word('shivam')
        print(result)
Note that the call of add_word has to be in the with block, otherwise the unmocked version is used.
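The other option mentioned above, moving the instantiation so the patch can take effect, could look roughly like this (a sketch; get_db is a made-up helper name):

# app.py (sketch): build the Database lazily so tests can patch it first
from flask import Flask
from redis_db import Database

app = Flask(__name__)
_db = None

def get_db():
    # The connection object is only created on first use, so patching
    # 'app.Database' (or 'app.get_db') in a test takes effect before any call.
    global _db
    if _db is None:
        _db = Database('zset')
    return _db

@app.route('/add_word/word=<word>')
def add_word(word):
    get_db().add(word)
    return "{} added".format(word)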
I'm trying to run Flask from an imported module (creating a wrapper using decorators).
Basically I have:
app.py:
import mywrapper

@mywrapper.entrypoint
def test():
    print("HEYO!")
mywrapper.py
from flask import Flask

ENTRYPOINT = None

app = Flask(__name__)

@app.route("/")
def listen():
    """Start the model API service"""
    ENTRYPOINT()

def entrypoint(f):
    global ENTRYPOINT
    ENTRYPOINT = f
    return f
With FLASK_APP=app set in the environment, running python -m flask results in:
flask.cli.NoAppException: Failed to find Flask application or factory in module "app". Use "FLASK_APP=app:name" to specify one.
Is there any trick to getting Flask to run like this? Or is it just not possible? The purpose of this is to abstract Flask away in this situation.
In my head, Flask should try to import app.py, which imports mywrapper.py, which should create the app and the route, yet this doesn't seem to be what occurs.
Any help would be appreciated.
So I've since learnt that Flask searches only in the chosen module's namespace for a variable containing a Flask object.
There may be a smart way to avoid this limitation, but I decided it was more sensible to just wrap the Flask class itself. If people want direct Flask functionality, I don't really mind in this situation, so the only real limitation is that some method names are off limits.
Basically:
mywrapper.py:
from flask import Flask

class Wrapper(Flask):
    def __init__(self, name):
        super().__init__(name)
        self.entrypoint_func = None

        @self.route("/")
        def listen():
            return self.entrypoint_func()

    def entrypoint(self, f):
        assert self.entrypoint_func is None, "Entrypoint can only be set once"
        self.entrypoint_func = f
        return f
and app.py:
from mywrapper import Wrapper

app = Wrapper(__name__)

@app.entrypoint
def test():
    print("HEYO!")
    return "SUCCESS"
This is still abstracted enough that I am happy with the results.
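For the record, running it should then just be the usual invocation, e.g. FLASK_APP=app python -m flask run; since app is an instance of a Flask subclass, the CLI's search for a Flask object in the app module should find it.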
I am having difficulty with a circular import problem. I have 2 class files like so:
--/service
service_module.py
settings.py
service_module imports various other files used throughout the project and acts as a container for functions used across it. I want to assert in my settings.py file that it is properly passed an instance of the Service_Module parent class. Removing the assert statement in settings.py fixes the issue, and I am able to call methods of the service_module class properly. For code completion and error checking, it makes my life easier to assert.
I have always struggled with understanding Python imports, but is this the right direction for handling my particular case?
service_module.py
from PyQt5.QtCore import QObject
from sqlalchemy import *
from sqlalchemy.orm import scoped_session, Session, sessionmaker
from service.logger import logger
from sqlalchemy.orm.exc import NoResultFound
from database.tables import *
from database.load_db import load_db
from service.settings import settings
from service.web_listener import web_listener
from service.character_manager import character_manager

class Service_Module(QObject):
    def __init__(self):
        super(Service_Module, self).__init__()
        load_database = load_db()
        self.sc_session: scoped_session = load_database.get_scoped_session()
        tb_scopes.make_default_scopes(service_module=self)

        self.logger = logger(service_module=self)
        self.settings = settings(service_module=self)
        self.characters = character_manager(service_module=self)
        self.callback_listener: web_listener = web_listener(service_module=self)
        self.callback_listener.start()

        assert isinstance(self.sc_session, scoped_session)
        assert isinstance(self.logger, logger)
        assert isinstance(self.settings, settings)
        assert isinstance(self.callback_listener, web_listener)
settings.py
from service.service_module import *

class settings(QObject):
    def __init__(self, service_module):
        super(settings, self).__init__()
        self.service = service_module
        assert isinstance(self.service, Service_Module)  # raises NameError: name 'Service_Module' is not defined
Edit:
Changing to the following solves my issue, although it feels kind of hacky and somehow incorrect.
from service.service_module import *
import service.service_module

class settings(QObject):
    def __init__(self, service_module):
        super(settings, self).__init__()
        self.service = service_module
        assert isinstance(self.service, service.service_module.Service_Module)
You need to delay the importing of one (or both) of the files to break the circular import. You can do this by moving the import statement from file scope (where it is executed as soon as the module is imported) into the execution scope of a function or method, where it is not executed until that method is called. I.e.:
class settings(QObject):
    def __init__(self, service_module):
        from service.service_module import Service_Module  # <-- import is here
        self.service = service_module
        assert isinstance(self.service, Service_Module)
Of course, this may affect other usages of the imported symbols from that module in this file, so you may need to repeat the import in more than one place.