How would one securely connect an SQL DB to an external client? - Python

I'm attempting to connect a database, located on a web server, to a robot, but I do not know how to do it. I would like the robot to run SELECT and UPDATE queries against the database. The other issue is that I do not intend to use C-family languages or Java; I plan on using Python for the main control system.
I do know:
PHP
VBScript
Batch
Python
If anyone knows how to connect the DB to a bot, it would be a great help.

So basically, how do you connect to an SQL DB in Python? I'm working on a virtual bot right now that does the same thing. Look into the mysql-connector-python module (MySQL Connector/Python): http://www.mysqltutorial.org/python-connecting-mysql-databases/
You would start by creating a config.ini with your credentials:
[mysql]
host = localhost
database = python_mysql
user = root
password =
Read config.ini and return a dictionary:
from configparser import ConfigParser

def read_db_config(filename='config.ini', section='mysql'):
    """ Read database configuration file and return a dictionary object
    :param filename: name of the configuration file
    :param section: section of database configuration
    :return: a dictionary of database parameters
    """
    # create parser and read ini configuration file
    parser = ConfigParser()
    parser.read(filename)

    # get section, default to mysql
    db = {}
    if parser.has_section(section):
        items = parser.items(section)
        for item in items:
            db[item[0]] = item[1]
    else:
        raise Exception('{0} not found in the {1} file'.format(section, filename))

    return db
and connect to the MySQL database:
from mysql.connector import MySQLConnection, Error
from python_mysql_dbconfig import read_db_config

def connect():
    """ Connect to MySQL database """
    db_config = read_db_config()
    conn = None
    try:
        print('Connecting to MySQL database...')
        conn = MySQLConnection(**db_config)

        if conn.is_connected():
            print('connection established.')
        else:
            print('connection failed.')

    except Error as error:
        print(error)

    finally:
        # only close if the connection was actually opened
        if conn is not None and conn.is_connected():
            conn.close()
            print('Connection closed.')

if __name__ == '__main__':
    connect()
and an UPDATE statement would look like the following:
def update_book(book_id, title):
    # read database configuration
    db_config = read_db_config()

    # prepare query and data
    query = """ UPDATE books
                SET title = %s
                WHERE id = %s """
    data = (title, book_id)

    conn = None
    cursor = None
    try:
        conn = MySQLConnection(**db_config)

        # update book title
        cursor = conn.cursor()
        cursor.execute(query, data)

        # accept the changes
        conn.commit()

    except Error as error:
        print(error)

    finally:
        # guard against the connection having failed before cursor/conn existed
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()

if __name__ == '__main__':
    update_book(37, 'The Giant on the Hill *** TEST ***')
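The original question also asks about SELECT queries. Here is a minimal sketch in the same style, assuming the same read_db_config() helper and the books table (with id and title columns) used above; the function name query_books is just for illustration:
def query_books():
    # read database configuration and open a connection
    db_config = read_db_config()
    conn = None
    cursor = None
    try:
        conn = MySQLConnection(**db_config)
        cursor = conn.cursor()

        # run a SELECT and iterate over the result rows
        cursor.execute("SELECT id, title FROM books")
        for (book_id, title) in cursor:
            print(book_id, title)

    except Error as error:
        print(error)

    finally:
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()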

Related

Running a Python script with a prepared statement gives me no errors but doesn't do what I expect [duplicate]

This question already has answers here: psycopg2 not actually inserting data (2 answers). Closed 2 months ago.
I am running this Python code to read quotes from a file and then insert each quote into a table in my database. When I run the script in my terminal there are no errors, but when I check the database table I tried to insert into, it tells me there are 0 rows.
#!/usr/bin/python
import psycopg2
import re
from config import config

conn = psycopg2.connect(
    host="localhost",
    database="favorite_quotes",
    user="postgres",
    password="???")

data = None
with open('file.txt', 'r') as file:
    data = file.read()

list_of_strings = re.findall('“(.+?)” \(.+?\)', data, re.DOTALL)

def insert_quotes():
    """ Connect to the PostgreSQL database server """
    conn = None
    try:
        # read connection parameters
        params = config()

        # connect to the PostgreSQL server
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)

        # create a cursor
        cur = conn.cursor()
        for str in list_of_strings:
            cur.execute("INSERT INTO the_courage_to_be_disliked (quote) VALUES (%s)", [str])

        # execute a statement
        # dblist = list(cur.fetchone())
        # quotes = []
        # for row in cur:
        #     quotes.append(row[1])
        # return quotes

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')

if __name__ == '__main__':
    insert_quotes()
You aren't committing your inserts, so they won't be visible from outside the current connection, and they will be implicitly rolled back when the connection is closed.
TL;DR, add a call to conn.commit() before calling conn.close().
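A minimal sketch of the fix inside insert_quotes(), assuming the same cursor and connection names as in the question; only the commit line is new:
        for str in list_of_strings:
            cur.execute("INSERT INTO the_courage_to_be_disliked (quote) VALUES (%s)", [str])

        # make the inserts permanent before closing the connection
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()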

ImportError: cannot import name 'config' & 'module' object is not callable using PostgreSQL & Python

Hey, I am having trouble connecting to my PostgreSQL DB through Python. I am following this tutorial. However, when I attempt to run the code I get the following error:
Traceback (most recent call last):
  File "/Users/username/Desktop/Coding/anybody/postgresqltest.py", line 2, in <module>
    from config import config
ImportError: cannot import name 'config'
[Finished in 0.201s]
However, I have pip-installed config (I am using a virtual environment) and verified it using pip list.
When I remove from config import config and replace it with just import config, I receive this error:
'module' object is not callable [Finished in 0.127s]
Here is the code I have written; I am a bit stuck on what to do next (the actual database I am connecting to isn't called test, by the way, this is a different one):
import psycopg2
from configparser import ConfigParser

def config(filename='database.ini', section='postgresql'):
    # create a parser
    parser = ConfigParser()
    # read config file
    parser.read(filename)

    # get section, default to postgresql
    db = {}
    if parser.has_section(section):
        params = parser.items(section)
        for param in params:
            db[param[0]] = param[1]
    else:
        raise Exception('Section {0} not found in the {1} file'.format(section, filename))

    return db

from config import config

def connect():
    conn = None
    try:
        params = config()
        print('Connecting to the PostgreSQL DB')
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        print('PostgreSQL database version:')
        cur.execute('Select version()')
        db_version = cur.fetchnone()
        print(db_version)
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')

if __name__ == '__main__':
    connect()
Put the config function into a separate .py file.
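For example, a minimal sketch, assuming you save the helper as config.py next to your main script (a local config.py early on sys.path also shadows the pip-installed config package, which is what triggers the errors here):
# config.py -- lives in the same directory as the main script
from configparser import ConfigParser

def config(filename='database.ini', section='postgresql'):
    """Return the given section of database.ini as a dict of connection parameters."""
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
    return dict(parser.items(section))
The main script then keeps only from config import config and drops the inline definition.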
Actually, you don't need the config module at all if you already have the DB connection parameters.
Here's the code that worked for my connection to a DB hosted by ElephantSQL (https://api.elephantsql.com/):
import psycopg2
#from config import config

conn = None
conn = psycopg2.connect(
    host="your DB host",
    database="DB NAME",
    user="USER NAME",
    password="DB PASSWORD")

try:
    # connect to the PostgreSQL server
    print('Connecting to the PostgreSQL database...')

    # create a cursor
    cur = conn.cursor()

    # execute a statement with cur.execute()
    # in this case we get the version
    print('PostgreSQL database version:')
    cur.execute('SELECT version()')

    # display the PostgreSQL database server version
    db_version = cur.fetchone()
    print(db_version)

    # close the communication with the PostgreSQL
    cur.close()
except (Exception, psycopg2.DatabaseError) as error:
    print(error)
finally:
    if conn is not None:
        conn.close()
        print('Database connection closed.')
Also, the psycopg docs don't mention a "config" module:
https://www.psycopg.org/docs/usage.html

Issues with connecting to Postgres DB via SSH in Python

I've been given a Postgres DB for my uni project; I have to SSH into a network, from which I can then access the DB. I've set up the connection in DBeaver using the SSH tab and it works perfectly fine. However, using Python, I can connect over SSH just fine but cannot connect to the DB itself. I've checked with another DB that doesn't require SSH and that works just fine. Here is my code. Note: I've already tried using SSHTunnel, too, to no avail. Also, ignore my quick hack to anonymize my SSH login data, as I didn't find out how to use a proper config file with paramiko late at night yesterday...
import os
from psycopg2 import connect, Error
from paramiko import SSHClient
from config import config

with open("ssh_config.txt", "r") as f:
    lines = f.readlines()
    hostname = lines[0].strip()
    username = lines[1].strip()
    password = lines[2].strip()

ssh = SSHClient()
ssh.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
ssh.connect(hostname=hostname, username=username, password=password)
print("SSH connected.")

try:
    params = config()
    conn = connect(**params)
    cursor = conn.cursor()
    print("DB connected.")

    # Print PostgreSQL connection properties.
    print(conn.get_dsn_parameters(), "\n")

    # Print PostgreSQL version.
    cursor.execute("SELECT version();")
    record = cursor.fetchone()
    print("You are connected to - ", record, "\n")
except (Exception, Error) as error:
    print("Error while connecting to PostgreSQL", error)
Any help would be appreciated. Thanks!
I've figured it out myself. Here is the updated code. Basically, I had to forward the remote address to localhost and then connect to localhost instead of the DB address.
from psycopg2 import connect, Error
from sshtunnel import SSHTunnelForwarder
from config import config

with open("ssh_config.txt", "r") as f:
    lines = f.readlines()
    hostname = lines[0].strip()
    username = lines[1].strip()
    password = lines[2].strip()
    remote_bind_address = lines[3].strip()

try:
    with SSHTunnelForwarder(
            (hostname, 22),
            ssh_username=username,
            ssh_password=password,
            remote_bind_address=(remote_bind_address, 5432),
            local_bind_address=("localhost", 8080)) as tunnel:
        tunnel.start()
        print("SSH connected.")

        params = config()
        conn = connect(**params)
        cursor = conn.cursor()
        print("DB connected.")

        # Print PostgreSQL connection properties.
        print(conn.get_dsn_parameters(), "\n")

        # Print PostgreSQL version.
        cursor.execute("SELECT version();")
        record = cursor.fetchone()
        print("You are connected to - ", record, "\n")

        cursor.close()
        conn.close()
        tunnel.close()
        print("DB disconnected.")
except (Exception, Error) as error:
    print("Error while connecting to DB", error)

Python MySQL connection: auto-reconnect on error

I have a problem with my Python MySQL connection which I need help with.
My setup is two Pis, each running a server. One Pi (SolarPi) has a MySQL database collecting data. The other Pi (OfficePi) connects to the SolarPi database to retrieve and update data over a network connection.
My main script works fine until I have to reboot the SolarPi for maintenance or a power problem and the connection from the OfficePi is lost. The Python script on the OfficePi then goes into a fault loop: "2006, MySQL server has gone away". Below is a sample of this script.
import MySQLdb

connSolar = MySQLdb.connect("192.xxx.x.x", "external", "xxxxx", "xxxxx")
# eternal connection to solar pi database
cursSolar = connSolar.cursor()

while 1:
    try:
        cursSolar.execute("SELECT * FROM dashboard")
        connSolar.commit()
        for reading in cursSolar.fetchall():
            heatingDemand = reading[2]  # get heating demand from dB
            print heatingDemand
    except (MySQLdb.Error, MySQLdb.Warning) as e:
        print (e)

connSolar.close()
So I tried rewriting this with a script from Stack Overflow and a website, as shown below, but this now terminates the program when SolarPi is rebooted, with the following error:
_mysql_exceptions.OperationalError: (2003, 'Can\'t connect to MySQL server on \'192.xxx.x.x' (111 "Connection refused")')
import MySQLdb

class DB:
    conn = None

    def connect(self):
        # eternal connection to solar pi database
        self.conn = MySQLdb.connect("192.xxx.x.x", "xxxxx", "xxxxxx", "house")

    def query(self, sql):
        try:
            cursor = self.conn.cursor()
            cursor.execute(sql)
        except (AttributeError, MySQLdb.OperationalError):
            self.connect()
            cursor = self.conn.cursor()
            cursor.execute(sql)
        return cursor

while 1:
    db = DB()
    sql = "SELECT * FROM dashboard"
    cur = db.query(sql)
    for reading in cur.fetchall():
        heatingDemand = reading[2]  # get heating demand from dB
        print heatingDemand
Is there a way for the OfficePi to keep trying to connect to the SolarPi MySQL database when it has shut down?
Change your code to check for a valid connection on each loop iteration, and otherwise pass:
import MySQLdb

class DB:
    def connect(self):
        try:
            self.conn = MySQLdb.connect("192.xxx.x.x", "xxxxx", "xxxxxx", "house")
        except (MySQLdb.Error, MySQLdb.Warning) as e:
            print (e)
            self.conn = None
        return self.conn

    def query(self, sql):
        try:
            cursor = self.conn.cursor()
            cursor.execute(sql)
        except (AttributeError, MySQLdb.OperationalError):
            self.connect()
            cursor = self.conn.cursor()
            cursor.execute(sql)
        return cursor

while 1:
    db = DB()
    conn = db.connect()
    if conn:
        sql = "SELECT * FROM dashboard"
        cur = db.query(sql)
        for reading in cur.fetchall():
            heatingDemand = reading[2]  # get heating demand from dB
            print heatingDemand
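A minimal sketch of the same loop with a short pause between attempts, assuming the DB class above, so the OfficePi doesn't spin at full speed while the SolarPi is rebooting (the 30-second delay is an arbitrary choice):
import time

while 1:
    db = DB()
    conn = db.connect()
    if conn:
        cur = db.query("SELECT * FROM dashboard")
        for reading in cur.fetchall():
            heatingDemand = reading[2]  # get heating demand from dB
            print(heatingDemand)
        conn.close()
    else:
        # SolarPi is down or rebooting; wait before trying again
        time.sleep(30)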

AWS Lambda Python with MySQL

I'm trying to connect to MySQL from my AWS Lambda script. I ran pip install --allow-external mysql-connector-python mysql-connector-python -t <dir> to install mysql-connector-python into a local directory.
I zipped the files and uploaded them to AWS Lambda, where my Python files are executed.
My scripts execute correctly up to the point where I initialize a MySQL connection.
I have this:
log('about to set connection for db')
connection = mysql.connector.connect(user=DB_USER, password=DB_PASSWORD, host=DB_HOST, database=DB_DATABASE)
query = "SELECT * FROM customers WHERE customer_email LIKE '%s' LIMIT 1"
log('set connection for DB')
'about to set connection for db' is being logged but 'set connection for DB' is never logged and my program hits a timeout and stops executing.
What might I be doing wrong?
EDIT:
This is my class that I'm calling from lambda_function.py
import mysql.connector
import logging
from mysql.connector import errorcode

class MySql( object ):
    USER     = None
    PASSWORD = None
    HOST     = None
    DATABASE = None

    logger = logging.getLogger()
    logger.setLevel( logging.INFO )

    def __init__(self, user, password, host, database):
        global USER, PASSWORD, HOST, DATABASE
        USER = user
        PASSWORD = password
        HOST = host
        DATABASE = database

    def getId( self, customer_email ):
        email_exists = False
        connection = mysql.connector.connect(user=USER, password=PASSWORD, host=HOST, database=DATABASE)
        query = "SELECT * FROM customers WHERE customer_email LIKE '%s' LIMIT 1"
        cursor = connection.cursor()
        cursor.execute( query % customer_email )
        data = cursor.fetchall()
        id = None
        for row in data :
            id = row[1]
            break
        cursor.close()
        connection.close()
        return id

    def insertCustomer( self, customer_email, id ):
        log('about to set connection for db')
        connection = mysql.connector.connect(user=USER, password=PASSWORD, host=HOST, database=DATABASE)
        log('set connection for DB')
        cursor = connection.cursor()
        try:
            cursor.execute("INSERT INTO customers VALUES (%s,%s)",( customer_email, id ))
            connection.commit()
        except:
            connection.rollback()
        connection.close()

def log( logStr ):
    logger.info( logStr )

def main():
    user = 'xxx'
    password = 'xxx'
    host = ' xxx'
    database = 'xxx'
    mysql = MySql( user, password, host, database )
    id = mysql.getId('testing')
    if id == None:
        mysql.insertCustomer('blah','blahblah')
    print id

if __name__ == "__main__":
    main()
When I execute MySql.py locally, my code works fine and my database gets updated, but nothing happens when I run it from AWS.
Is it a MySQL instance on AWS (RDS) or an on-premises DB? If RDS, check the NACL inbound rules of the VPC associated with your DB instance. Inbound rules can allow/deny traffic from specific IP sources.
When you created the zip file, did you pip install to a target directory?
I have enclosed the syntax below; it copies the module files into the target directory that you zip up.
That may be the reason you are able to execute it locally but not in Lambda.
This is the syntax:
pip install module-name -t /path/to/PythonExampleDir
