from fastapi import FastAPI
from pydantic import BaseModel
import os
from dotenv import load_dotenv
from neo4j import GraphDatabase
# Load .env so the Neo4j credentials below are available via os.getenv.
load_dotenv()
uri = os.getenv("uri")    # Neo4j connection URI (passed to GraphDatabase.driver)
user = os.getenv("user")  # Neo4j username
pwd = os.getenv("pwd")    # Neo4j password
class database(BaseModel):
    """Request body schema: the name of the Neo4j database to select."""

    # database name supplied by the caller of /selectdb
    db: str
def connection():
    """Build and return a Neo4j driver from the env-configured credentials."""
    return GraphDatabase.driver(uri=uri, auth=(user, pwd))
app = FastAPI()


@app.post("/selectdb")  # the "@" was lost in the paste; "#" made this a comment
def selectdb(database: database):
    """Select a Neo4j database and confirm the choice.

    Colon commands such as ":use" are understood only by the Neo4j
    Browser; with the Python driver the database is chosen when the
    session is created (session(database=...)), not via a query.
    """
    driver_neo4j = connection()
    # The context manager closes the session, avoiding a resource leak.
    with driver_neo4j.session(database=database.db) as session:
        # Trivial query to verify the selected database is reachable.
        session.run("RETURN 1")
    # The original returned a set literal; a dict serializes as JSON.
    return {"response": "You have selected the database: " + database.db}
Is it not possible to make a query with :use in the Python Neo4j Driver, or have I done something wrong here?
Commands that start with a colon, like :use, are special commands that are understood only by the neo4j Browser.
When using a neo4j driver, you specify the name of the database you want to use when creating a Session. For example, see the with clause in this Python driver sample, which I excerpt here:
with driver.session(database="neo4j") as session:
records, summary = session.execute_read(get_people)
.
.
.
Related
I am trying to configure the tests. According to the tortoise orm documentation I create this test configuration file:
import pytest
from fastapi.testclient import TestClient
from tortoise.contrib.test import finalizer, initializer
import app.main as main
from app.core.config import settings
@pytest.fixture(scope="session", autouse=True)
def initialize_tests(request):
    """Initialize the Tortoise test database once for the whole session."""
    # "@" separates credentials from host in a postgres URL (was mangled to "#").
    db_url = "postgres://USERNAME_HERE:SECRET_PASS_HERE@127.0.0.1:5432/test"
    initializer(
        [
            "app.models",
        ],
        db_url=db_url,
        app_label="models",
    )
    print("initialize_tests")
    # pytest's API is request.addfinalizer (original had a typo: add_finaliser).
    request.addfinalizer(finalizer)
@pytest.fixture(scope="session")  # "@" was mangled to "#" in the paste
def client():
    """Yield a TestClient wrapping the application for the whole session."""
    app = main.create_application()
    # TestClient as a context manager runs startup/shutdown events.
    with TestClient(app) as client:
        print("client")
        yield client
And the test file looks like this:
def test_get(client):
    """The endpoint answers with HTTP 200."""
    resp = client.get("/v1/url/")
    assert resp.status_code == 200
I try to run the tests, but I get this error:
asyncpg.exceptions._base.InterfaceError: cannot perform operation: another operation is in progress
I have found that some users don't use initializer and finalizer and do everything manually.
Testing in FastAPI using Tortoise-ORM
https://stackoverflow.com/a/66907531
But that doesn't look like the clear solution.
Question: Is there a way to make the tests work using initializer and finalizer?
I am trying to add some monitoring to a simple REST web service with flask and mongoengine and have come across what I think is a lack of understanding on my part of how imports and mongoengine is working in flask applications.
I'm following pymongo's documentation on monitoring : https://pymongo.readthedocs.io/en/3.7.2/api/pymongo/monitoring.html
I defined the following CommandListener in a separate file:
import logging
from pymongo import monitoring
log = logging.getLogger('my_logger')


class CommandLogger(monitoring.CommandListener):
    """pymongo command listener that logs when a command starts."""

    def started(self, event):
        # The event exposes command_name, request_id and connection_id.
        log.debug("Command {0.command_name} with request id "
                  "{0.request_id} started on server "
                  "{0.connection_id}".format(event))


# Registration happens at import time — importing this module is what hooks
# the listener into pymongo, which is why the import in application_builder
# is required for monitoring to take effect.
monitoring.register(CommandLogger())
I made an application_builder.py file to create my flask App, code looks something like this:
from flask_restful import Api
from flask import Flask
from command_logger import CommandLogger # <----
from db import initialize_db
from routes import initialize_routes
def create_app():
    """Build the Flask application and wire up its DB and routes."""
    flask_app = Flask(__name__)
    rest_api = Api(flask_app)
    initialize_db(flask_app)
    initialize_routes(rest_api)
    return flask_app
The monitoring only seems to work if I import CommandLogger in application_builder.py. I'd like to understand what is going on here: how does the import affect the monitoring registration?
Also I'd like to extract monitoring.register(CommandLogger()) as a function and call it at a latter stage in my code something like def register(): monitoring.register(CommandLogger())
But this doesn't seem to work; "registration" only works when it is in the same file as the CommandLogger class...
From the MongoEngine's doc, it seems important that the listener gets registered before connecting mongoengine
To use pymongo.monitoring with MongoEngine, you need to make sure that
you are registering the listeners before establishing the database
connection (i.e calling connect)
This worked for me. I'm just initializing/registering it the same way as I did other modules to avoid circular imports.
# admin/logger.py — pymongo monitoring listeners, registered via one function
# so the ordering (before connect) is controlled from app.py.
import logging
from pymongo import monitoring

# Root logger at DEBUG so the listeners' debug output is visible.
log = logging.getLogger()
log.setLevel(logging.DEBUG)
logging.basicConfig(level=logging.DEBUG)


class CommandLogger(monitoring.CommandListener):
    """Logs pymongo command events."""
    # def methods...


class ServerLogger(monitoring.ServerListener):
    """Logs server state-change events."""
    # def methods


class HeartbeatLogger(monitoring.ServerHeartbeatListener):
    """Logs server heartbeat events."""
    # def methods


def initialize_logger():
    """Register all monitoring listeners; call BEFORE connecting MongoEngine."""
    monitoring.register(CommandLogger())
    monitoring.register(ServerLogger())
    monitoring.register(HeartbeatLogger())
    # NOTE(review): TopologyLogger is not defined in this snippet — define a
    # monitoring.TopologyListener subclass or drop this registration.
    monitoring.register(TopologyLogger())
# /app.py
from flask import Flask
from admin.toolbar import initialize_debugtoolbar
from admin.admin import initialize_admin
from admin.views import initialize_views
from admin.logger import initialize_logger
from database.db import initialize_db
from flask_restful import Api
from resources.errors import errors
app = Flask(__name__)

# imports requiring app
from resources.routes import initialize_routes

api = Api(app, errors=errors)

# pymongo listeners must be registered before the database connection is
# established, so the logger is initialized before the DB.
initialize_logger()

# Database and Routes
initialize_db(app)
initialize_routes(api)

# Admin and Development
initialize_admin(app)
initialize_views()
initialize_debugtoolbar(app)
# /run.py — development entry point: import the configured app and serve it.
from app import app

app.run(debug=True)  # debug=True is for development only
then in any module...
# Example usage: the registered listeners will log the driver commands
# triggered by the save() below.
from admin.logger import log
from db.models import User

# inside some class/view/queryset or however your objects are written...
log.info('Saving an item through MongoEngine...')
User(name='Foo').save()
What I'm trying to figure out now is how to integrate Flask DebuggerToolbar's Logging panel with the monitoring messages from these listeners...
I am new to the Python boto3 library in AWS. I want to know how to search for a particular serviceArn (u'arn:aws:ecs:us-east-1:778784494011:service/RITS-selenium-node-chrome-service-Service-5ZADFVZNNCFJ') among the list of services running in the AWS ECS cluster that I have below with the key value pair: {"Browser":"Chrome"}.
here is my code:
import boto3
from flask import Flask
from flask import request
from flask import jsonify, make_response
import requests
import json
browser = 'chrome'
CLUSTER = 'ECS-QEAUTOMATION-HYBRID-DEV'

client = boto3.client('ecs')
list_services = client.list_services(cluster=CLUSTER)
print(list_services['serviceArns'])

# Iterate the ARN strings themselves, not the response dict (which would
# yield its keys); a str also has no .servicename attribute.
for service_arn in list_services['serviceArns']:
    if browser in service_arn:
        print(service_arn)
Output:
I am still receiving the list of all services running in the cluster except one:
[u'arn:aws:ecs:us-east-1:778784494011:service/RITS-selenium-hub-service-Service-1ESSGHC030KT6', u'arn:aws:ecs:us-east-1:778784494011:service/RITS-selenium-node-chrome-service-Service-5ZADFVZNNCFJ', u'arn:aws:ecs:us-east-1:778784494011:service/RITS-sonarqube-service-Service-1359LNU242V25', u'arn:aws:ecs:us-east-1:778784494011:service/RITS-jmetermaster-service-Service-1JOAYPCN8KNZI', u'arn:aws:ecs:us-east-1:778784494011:service/RITS-jmeterslave-service-Service-1PIAW69QGP9F8', u'arn:aws:ecs:us-east-1:778784494011:service/RITS-selenium-node-firefox-service-Service-QVDLJQ423TX7', u'arn:aws:ecs:us-east-1:778784494011:service/RITS-dashboard-service-Service-1T8VSPQ28ZAGO', u'arn:aws:ecs:us-east-1:778784494011:service/RITS-commandcenter-service-Service-1MVRK6EPDL3MN']
Try this. It looks like you were looping through list_services instead of the arns.
browser = 'chrome'
CLUSTER = 'ECS-QEAUTOMATION-HYBRID-DEV'

ecs = boto3.client('ecs')
response = ecs.list_services(cluster=CLUSTER)
arns = response['serviceArns']
print(arns)

# Print only the ARNs that mention the requested browser.
for arn in arns:
    if browser in arn:
        print(arn)
Going through Admin -> Connections, we have the ability to create/modify a connection's params, but I'm wondering if I can do the same through API so I can programmatically set the connections
airflow.models.Connection seems like it only deals with actually connecting to the instance instead of saving it to the list. It seems like a function that should have been implemented, but I'm not sure where I can find the docs for this specific function.
Connection is actually a model which you can use to query and insert a new connection
from airflow import settings
from airflow.models import Connection
# Build the Connection and persist it through the metadata-DB session.
new_conn = Connection(
    conn_id=conn_id,
    conn_type=conn_type,
    host=host,
    login=login,
    password=password,
    port=port,
)
db_session = settings.Session()
db_session.add(new_conn)
db_session.commit()  # the connection is now stored programmatically
You can also add, delete, and list connections from the Airflow CLI if you need to do it outside of Python/Airflow code, via bash, in a Dockerfile, etc.
airflow connections --add ...
Usage:
airflow connections [-h] [-l] [-a] [-d] [--conn_id CONN_ID]
[--conn_uri CONN_URI] [--conn_extra CONN_EXTRA]
[--conn_type CONN_TYPE] [--conn_host CONN_HOST]
[--conn_login CONN_LOGIN] [--conn_password CONN_PASSWORD]
[--conn_schema CONN_SCHEMA] [--conn_port CONN_PORT]
https://airflow.apache.org/cli.html#connections
It doesn't look like the CLI currently supports modifying an existing connection, but there is a Jira issue for it with an active open PR on GitHub.
AIRFLOW-2840 - cli option to update existing connection
https://github.com/apache/incubator-airflow/pull/3684
First check if connection exists, after create new Connection using from airflow.models import Connection :
import logging
from airflow import settings
from airflow.models import Connection
def create_conn(conn_id, conn_type, host, login, pwd, port, desc):
    """Create an Airflow Connection unless one with conn_id already exists.

    Returns the new Connection, or None when a connection with the same
    conn_id is already stored.
    """
    conn = Connection(conn_id=conn_id,
                      conn_type=conn_type,
                      host=host,
                      login=login,
                      password=pwd,
                      port=port,
                      description=desc)
    session = settings.Session()
    existing = session.query(Connection).filter(Connection.conn_id == conn.conn_id).first()
    # .first() returns a Connection or None; comparing str(Connection) to the
    # id (as the original did) depends on __str__ formatting and is fragile.
    if existing is not None:
        logging.warning(f"Connection {conn.conn_id} already exists")
        return None
    session.add(conn)
    session.commit()
    logging.info(Connection.log_info(conn))
    logging.info(f'Connection {conn_id} is created')
    return conn
You can populate connections using environment variables using the connection URI format.
The environment variable naming convention is AIRFLOW_CONN_<conn_id>, all uppercase.
So if your connection id is my_prod_db then the variable name should be AIRFLOW_CONN_MY_PROD_DB.
In general, Airflow’s URI format is like so:
my-conn-type://my-login:my-password@my-host:5432/my-schema?param1=val1&param2=val2
Note that connections registered in this way do not show up in the Airflow UI.
To use session = settings.Session(), it assumes the airflow database backend has been initiated. For those who haven't set it up for your development environment, a hybrid method using both Connection class and environment variables will be a workaround.
Below is the example for setting up a S3Hook
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.models.connection import Connection
import os
import json

# Build a Connection object purely in memory — no metadata DB required.
aws_default = Connection(
    conn_id="aws_default",
    conn_type="aws",
    login='YOUR-AWS-KEY-ID',
    password='YOUR-AWS-KEY-SECRET',
    extra=json.dumps({'region_name': 'us-east-1'})
)

# Airflow resolves env vars named AIRFLOW_CONN_<CONN_ID> as connections;
# get_uri() serializes the object into that URI format.
os.environ["AIRFLOW_CONN_AWS_DEFAULT"] = aws_default.get_uri()

s3_hook = S3Hook(aws_conn_id='aws_default')
s3_hook.list_keys(bucket_name='YOUR-BUCKET', prefix='YOUR-FILENAME')
I'm having trouble with the MongoHQ Heroku addon. Locally my app works and the os variable is present and well-formed on Heroku. However, when I attempt to access the db it throws an error: OperationFailure: database error: unauthorized db:my_database ns:my_database.cars lock type:0 client:128.62.187.133. If I try to hard-code the connection string from MongoHQ and run locally, I get the same error.
My app is below:
import os
import datetime
from flask import Flask
from flask import g
from flask import jsonify
from flask import json
from flask import request
from flask import url_for
from flask import redirect
from flask import render_template
from flask import make_response
import pymongo
from pymongo import Connection
from bson import BSON
from bson import json_util
app = Flask(__name__)
def mongo_conn():
    """Return a MongoDB connection.

    Uses MONGOHQ_URL (mongodb://<user>:<pass>@<base_url>:<port>/<url_path>)
    when set (as on Heroku); otherwise connects to the local default server.
    """
    url = os.environ.get('MONGOHQ_URL')
    return Connection(url) if url else Connection()
@app.route('/', methods=['GET', 'POST'])  # "@" was mangled to "#" in the paste
def hello():
    """Insert a sample car document into MongoDB."""
    # Get your DB
    connection = mongo_conn()
    # NOTE(review): the attribute must be the DB name MongoHQ provisioned
    # (e.g. connection.app52314314); using "my_database" is what triggered
    # the "unauthorized" OperationFailure in the question.
    db = connection.my_database
    # Create an object
    car = {"brand": "Ford",
           "model": "Mustang",
           "date": datetime.datetime.utcnow()}
    # Get your collection
    cars = db.cars
    # Insert it
    cars.insert(car)
    ...
Edit: MongoHQ support helped me. Problem was that I was calling my database my_database instead of the actual DB name given to me by the MongoHQ addon. E.g., db = connection.app52314314. That change fixed it.
You likely need to run the authenticate command against the DB directly after you connect.
Try something like this:
db.authenticate([USER], [PASSWORD])
If that doesn't work, feel free to email support@mongohq.com and we can help you out with your specific DB.
You don't need to do all that. You can simply:
from pymongo import MongoClient

# MongoClient parses the credentials from the URL and authenticates;
# get_default_database() returns the DB named in the URL's path component.
# NOTE(review): this snippet also requires `import os`.
client = MongoClient(os.environ['MONGOHQ_URL'])
mongo_db = client.get_default_database()
It will automatically authenticate you, and connect to the provisioned database, the <url_path> part of your connection url.