I am trying to connect to a server remotely and then access its local database with Python. I am successfully connecting to the server, but I can't seem to connect to the database on that server. My code is below:
import psycopg2
from sshtunnel import SSHTunnelForwarder

try:
    with SSHTunnelForwarder(
            ('<server ip address>', 22),
            ssh_private_key="</path/to/private/ssh/key>",
            ssh_username="<server username>",
            remote_bind_address=('localhost', 5432)) as server:

        print "server connected"

        conn = psycopg2.connect(database="<dbname>", port=server.local_bind_port)
        curs = conn.cursor()
        print "database connected
except:
    print "Connection Failed"
These are pieces of code I found on the internet and pieced together. I have also tried the connection statements below in place of the code above:
params = {
    'database': '<dbname>',
    'user': '<dbusername>',
    'password': '<dbuserpass>',
    'host': 'localhost',
    'port': 5432
}

conn = psycopg2.connect(**params)
I know the database is reachable because, on my machine, I am able to use sqlectron to tunnel in and connect without issue.
Just in case it is not clear from the above what I am trying to do: I need to SSH tunnel into my remote server using a private SSH key on my computer (which works), and then connect to a PostgreSQL database on that server listening on localhost port 5432.
I currently get the error message below with both ways of trying to connect:
2016-01-23 11:16:10,978 | ERROR | Tunnel: 0.0.0.0:49386 <> localhost:5432 error: (9, 'Bad file descriptor')
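To make the goal concrete, this is the shape of what I am trying to end up with (an untested sketch; the host, key path, and credentials are placeholders):

import psycopg2
from sshtunnel import SSHTunnelForwarder

# Untested sketch of the intended setup: key-based SSH auth, then psycopg2
# connecting through the tunnel's dynamically assigned local port.
with SSHTunnelForwarder(
        ('<server ip address>', 22),
        ssh_username='<server username>',
        ssh_private_key='</path/to/private/ssh/key>',
        remote_bind_address=('localhost', 5432)) as tunnel:
    conn = psycopg2.connect(
        database='<dbname>',
        user='<dbusername>',
        password='<dbuserpass>',
        host='127.0.0.1',               # local end of the tunnel
        port=tunnel.local_bind_port)    # not 5432
    with conn.cursor() as curs:
        curs.execute('SELECT version();')
        print(curs.fetchone())
    conn.close()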
I don't know if this will be helpful, but I had to connect to a PostgreSQL database through an SSH tunnel as well. I succeeded in connecting with your code after some modifications:
import psycopg2
from sshtunnel import SSHTunnelForwarder

try:
    with SSHTunnelForwarder(
            ('<server ip address>', 22),
            #ssh_private_key="</path/to/private/ssh/key>",
            ### in my case, I used a password instead of a private key
            ssh_username="<server username>",
            ssh_password="<mypasswd>",
            remote_bind_address=('localhost', 5432)) as server:

        server.start()
        print "server connected"

        params = {
            'database': '<dbname>',
            'user': '<dbusername>',
            'password': '<dbuserpass>',
            'host': 'localhost',
            'port': server.local_bind_port
        }

        conn = psycopg2.connect(**params)
        curs = conn.cursor()
        print "database connected"
except:
    print "Connection Failed"
After adding server.start(), the code worked nicely. Also, the closing quotation mark was missing after "database connected" in your original.
I hope this might be helpful to you, thanks for sharing your code!
Both these examples were very helpful. I just needed to combine the good parts from both.
from sshtunnel import SSHTunnelForwarder   # Run pip install sshtunnel
from sqlalchemy.orm import sessionmaker    # Run pip install sqlalchemy
from sqlalchemy import create_engine

with SSHTunnelForwarder(
        ('<remote server ip>', 22),        # remote server IP and SSH port
        ssh_username="<username>",
        ssh_password="<password>",
        remote_bind_address=('<local server ip>', 5432)) as server:  # PostgreSQL host and port on the remote machine

    server.start()                         # start SSH server
    print 'Server connected via SSH'

    # connect to PostgreSQL
    local_port = str(server.local_bind_port)
    engine = create_engine('postgresql://<username>:<password>@127.0.0.1:' + local_port + '/database_name')

    Session = sessionmaker(bind=engine)
    session = Session()
    print 'Database session created'

    # test data retrieval
    test = session.execute("SELECT * FROM database_table")
    for row in test:
        print row['id']

    session.close()
I am trying to use Python to connect to a PostgreSQL instance, which is located on Azure, through an SSH tunnel. I can connect to the database with DBeaver with no problem.
Here is the code that I am using.
import psycopg2
from sshtunnel import SSHTunnelForwarder

server = SSHTunnelForwarder(
    ('160.**.**.**', 22),
    ssh_username="*******",
    ssh_password="*******",
    remote_bind_address=('localhost', 5432))

server.start()
print("server connected")

params = {
    'database': '*******',
    'user': '*****postgresadmin@*****dev-postgres',
    'password': '************',
    'host': '**********-postgres.postgres.database.azure.com',
    'port': server.local_bind_port
}

conn = psycopg2.connect(**params)
cur = conn.cursor()
text = "select * from table"
cur.execute(text)
However I get the following error:
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
psycopg2.OperationalError: could not translate host name "**********-postgres.postgres.database.azure.com" to address: Unknown host
I also tried SQLAlchemy, with the same result.
Any idea what I am doing wrong? Do I maybe need the IP address of the host instead of the domain name?
The SSHTunnelForwarder setup you have forwards to a service running on the remote server itself.
A different block is needed if you want to use the remote server as a bridge to connect to another server:
import sshtunnel
import psycopg2

with sshtunnel.open_tunnel(
        (REMOTE_SERVER_IP, 443),
        ssh_username="",
        ssh_password="*******",
        remote_bind_address=(AZURE_SERVER_HOST, 5432),   # forward to the Azure PostgreSQL port, not 22
        local_bind_address=('0.0.0.0', 10022)
) as tunnel:
    params = {
        'database': '*******',
        'user': '*****postgresadmin@*****dev-postgres',
        'password': '************',
        'host': '127.0.0.1',   # connect through the local end of the tunnel
        'port': 10022
    }

    conn = psycopg2.connect(**params)
    cur = conn.cursor()
    text = "select * from table"
    cur.execute(text)
I have a remote server containing a database whose address is private. When I run the script below, I get:
server connected
Connection Failed
import psycopg2
from sshtunnel import SSHTunnelForwarder
import os

try:
    with SSHTunnelForwarder(
            ('Remote_server_ip', Remote_server_port),
            ssh_private_key="/home/User/.ssh/id_rsa",
            ssh_username="xxxxx",
            remote_bind_address=(Database_private_ip, Database_private_port)) as server:

        server.start()
        print("server connected")

        params = {
            'database': 'xxxxx',
            'user': 'xxxxx',
            'password': 'xxxxxx',
            'host': 'localhost',
            'port': PORT
        }

        conn = psycopg2.connect(**params)
        curs = conn.cursor()
        print("database connected")
except:
    print("Connection Failed")
I'm thinking the port forwarding is still not actually happening, which is why I can't reach the database and never see "database connected".
How do I solve this?
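If it helps, one thing that stands out compared with the accepted answer above is that PORT is hard-coded instead of taken from the tunnel, and the bare except hides the real error. A rough sketch of the connect step with those two things changed (it goes inside the with block, where server is the tunnel object; placeholders kept as in the question):

# Sketch only: connect through the tunnel's local end, and surface the real error.
try:
    params = {
        'database': 'xxxxx',
        'user': 'xxxxx',
        'password': 'xxxxxx',
        'host': '127.0.0.1',                # local end of the tunnel
        'port': server.local_bind_port      # assigned by SSHTunnelForwarder
    }
    conn = psycopg2.connect(**params)
    print("database connected")
except Exception as exc:
    print("Connection Failed:", exc)        # show why it actually failed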
Hi, I have shared hosting that I bought, and it only allows remote MySQL connections over SSH.
As far as I know, it doesn't use any public or private keys.
My connection setup in MySQL Workbench works when I try to connect.
I have looked at another Stack Overflow question, but none of the answers seem to work for me. I'm really at a dead end and need to get this working. Can someone help me out, please?
So I figured it out after about a million rounds of trial and error:
import pymysql
import paramiko
import pandas as pd
from paramiko import SSHClient
from sshtunnel import SSHTunnelForwarder

ssh_host = '198.54.xx.xx'
ssh_host_port = 21098              # your SSH port
ssh_username = "sshuser123"        # change this
ssh_password = "sshpassword123"    # change this

db_user = 'db user'                # change this
db_password = 'password123'        # change this
db = 'main_db'                     # the db that the user is linked to

with SSHTunnelForwarder(
        (ssh_host, ssh_host_port),
        ssh_username=ssh_username,
        ssh_password=ssh_password,
        remote_bind_address=('127.0.0.1', 3306)) as tunnel:

    conn = pymysql.connect(host='127.0.0.1', user=db_user,
                           passwd=db_password, db=db,
                           port=tunnel.local_bind_port)
    query = '''SELECT * FROM tablename;'''
    data = pd.read_sql_query(query, conn)
    print(data)
    conn.close()
This is the code to use if your SSH access for MySQL doesn't use a public/private key.
Hope this helps anyone facing the same issue!!
Connect to the server 198.54.x.240:21098 via SSH with port forwarding,
for example: ssh -p 21098 -t -gL 33069:localhost:3306 198.54.x.240
On Windows use PuTTY; I like KiTTY (a PuTTY fork). Add the connection and configure the SSH tunnel in its settings.
Then connect to MySQL via localhost:33069 as you normally would. MySQL Workbench does the same thing, just forwarding 3306 to 3306; if you need more than one remote connection, best practice is to forward each one to a different local port.
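Once that tunnel is up, the Python side is just an ordinary local connection; a rough pymysql sketch (the credentials and database name are placeholders):

import pymysql

# Assumes the ssh -gL 33069:localhost:3306 tunnel above is already running.
conn = pymysql.connect(host='127.0.0.1', port=33069,
                       user='db_user', passwd='db_password', db='main_db')
try:
    with conn.cursor() as cur:
        cur.execute("SELECT VERSION();")
        print(cur.fetchone())
finally:
    conn.close()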
I have a database on a server which I need to access through SSH. Right now I deal with the DB by using the command line to get the data.
import paramiko
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname='XX.XX.XX', username='user', password='pass', port = YYY)
query = "mysql -u " + username_sql + " -p" + password_sql +" dbb -e \"" + sql_query + "\""
ssh.exec_command(query.decode('string_escape'))
ssh.close()
Is there a way to do this with SQLAlchemy to be more efficient and so I can work with pandas DataFrames directly?
from sqlalchemy import create_engine

engine = create_engine(
    "mysql://username_sql:password_sql@localhost/dbb")
In case anyone is interested in connecting to a remote PostgreSQL database via SSH and loading the data into a pandas DataFrame, here is how to do it.
Suppose we have a PostgreSQL database installed on a remote server, to which we can SSH with the following parameters.
SSH parameters:
Server's ip: 10.0.0.101
SSH port: 22 (default port for SSH)
Username: my_username
Password: my_password
Database parameters:
Port: 5432 (postgresql default port)
Database name: db
Database user: postgres_user (default username is postgres)
Database password: postgres_pswd (default password is an empty string)
Table with our data: MY_TABLE
Now, we want to connect to this database on our end and load data into a pandas DataFrame:
from sshtunnel import SSHTunnelForwarder
from sqlalchemy import create_engine
import pandas as pd
server = SSHTunnelForwarder(
    ('10.0.0.101', 22),
    ssh_username="my_username",
    ssh_password="my_password",
    remote_bind_address=('127.0.0.1', 5432)
)

server.start()
local_port = str(server.local_bind_port)
engine = create_engine('postgresql://{}:{}@{}:{}/{}'.format("postgres_user", "postgres_pswd", "127.0.0.1", local_port, "db"))
dataDF = pd.read_sql("SELECT * FROM \"{}\";".format("MY_TABLE"), engine)
server.stop()
The easiest way to do this would be to run an SSH tunnel to the mysql port on the remote host. For example:
ssh -f user@XX.XX.XX.XX -L 3307:mysql1.example.com:3306 -N
Then connect locally with SQLAlchemy:
engine = create_engine("mysql://username_sql:password_sql@localhost:3307/dbb")
If you really want to use paramiko, try the port-forwarding demo code in the paramiko repo or the sshtunnel module. The ssh command might be the easiest method though, and you can use autossh to restart the tunnel if it goes down.
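With the tunnel from that ssh command running, the pandas part of the question becomes a normal local read (a sketch; the table name is a placeholder):

import pandas as pd
from sqlalchemy import create_engine

# Assumes the `ssh -f ... -L 3307:mysql1.example.com:3306 -N` tunnel above is up.
engine = create_engine("mysql://username_sql:password_sql@localhost:3307/dbb")
df = pd.read_sql("SELECT * FROM some_table", engine)   # some_table is a placeholder
print(df.head())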
You could use the SSHTunnel library as follows:
from sshtunnel import SSHTunnelForwarder   # Run pip install sshtunnel
from sqlalchemy import create_engine       # Run pip install sqlalchemy
from sqlalchemy.orm import sessionmaker

with SSHTunnelForwarder(
        ('10.160.1.24', 22),               # remote server IP and SSH port
        ssh_username="<usr>",
        ssh_password="<pwd>",
        remote_bind_address=('127.0.0.1', 5432)
) as server:

    server.start()                         # start SSH server
    print 'Server connected via SSH'

    # connect to PostgreSQL
    local_port = str(server.local_bind_port)
    engine = create_engine('postgresql://<db_user>:<db_pwd>@127.0.0.1:' + local_port + '/<db_name>')

    Session = sessionmaker(bind=engine)
    session = Session()
    print 'Database session created'

    # test data retrieval
    test = session.execute("SELECT * FROM <table_name>")
Just swap in the (host, port) of the server running Postgres:
from sshtunnel import SSHTunnelForwarder   # Run pip install sshtunnel
from sqlalchemy import create_engine       # Run pip install sqlalchemy

server = SSHTunnelForwarder(
    (<'your host'>, <host port>),
    ssh_username=<"os remote username">,
    ssh_pkey=<'path/to/key.pem'>,   # or ssh_password.
    remote_bind_address=(<'postgres db host'>, <'postgres db port'>))

server.start()

connection_data = 'postgresql://{user}:{password}@{host}:{port}/{db}'.format(
    user=<'postgres user'>,
    password=<'postgres password'>,
    host=server.local_bind_host,
    port=server.local_bind_port,
    db=<'postgres db name'>)

engine = create_engine(connection_data)

# Do your queries

server.stop()
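As a quick sanity check in place of the "Do your queries" comment, something like this works with SQLAlchemy 1.x once the engine exists (just a sketch):

# Trivial round-trip through the tunnel to confirm the connection works.
with engine.connect() as connection:
    result = connection.execute("SELECT 1;")
    print(result.fetchone())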
I'm going to piggyback on @Matin Kh's answer with a non-PostgreSQL DB: MySQL, using PythonAnywhere.com.
This code will take a table and convert it to an Excel file.
import sshtunnel
import sqlalchemy
import pymysql
import pandas as pd
from pandas import ExcelWriter
import datetime as dt
from sshtunnel import SSHTunnelForwarder
server = SSHTunnelForwarder(
    ('ssh.pythonanywhere.com'),
    ssh_username='username',
    ssh_password='password',
    remote_bind_address=('username.mysql.pythonanywhere-services.com', 3306))
server.start()
local_port = str(server.local_bind_port)
db = 'username$database'
engine = sqlalchemy.create_engine(f'mysql+pymysql://username:password#127.0.0.1:{local_port}/{db}')
print('Engine Created')
df_read = pd.read_sql_table('tablename',engine)
print('Grabbed Table')
writer = ExcelWriter('excelfile.xlsx')
print('writer created')
df_read.to_excel(writer,'8==D') # '8==D' specifies sheet
print('df to excel')
writer.save()
print('saved')
server.stop()
I am trying to connect my Python program to a remote MySQL database via SSH.
I am using paramiko for SSH and SQLAlchemy for the database.
Here is what I have so far:
import paramiko
from sqlalchemy import create_engine
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect('host', port=port, username='user', password='pass')
engine = create_engine('mysql+mysqldb://user:pass@host/db')
I am getting an error:
sqlalchemy.exc.OperationalError: (_mysql_exceptions.OperationalError) (2003, "Can't connect to MySQL server on 'mcsdev.croft-it.com' (60)")
Sorry, I posted a duplicate answer before. Here is a more elaborate answer tailored exactly to your question ;)
If you still need to connect to a remote MySQL DB via SSH, I have used a library named sshtunnel, which wraps and simplifies the use of paramiko (a dependency of sshtunnel).
With this code I think you will be good to go:
from sshtunnel import SSHTunnelForwarder
from sqlalchemy import create_engine

server = SSHTunnelForwarder(
    ('host', 22),
    ssh_password="password",
    ssh_username="username",
    remote_bind_address=('127.0.0.1', 3306))

server.start()

engine = create_engine('mysql+mysqldb://user:pass@127.0.0.1:%s/db' % server.local_bind_port)

# DO YOUR THINGS

server.stop()
This code works for me:
import pymysql
import paramiko
from paramiko import SSHClient
from sshtunnel import SSHTunnelForwarder
from sqlalchemy import create_engine, text

# ssh config
mypkey = paramiko.RSAKey.from_private_key_file('your/user/location/.ssh/id_rsa')
ssh_host = 'your_ssh_host'
ssh_user = 'ssh_host_username'
ssh_port = 22

# mysql config
sql_hostname = 'your_mysql_hostname'
sql_username = 'mysql_user'
sql_password = 'mysql_password'
sql_main_database = 'your_database_name'
sql_port = 3306
host = '127.0.0.1'

with SSHTunnelForwarder(
        (ssh_host, ssh_port),
        ssh_username=ssh_user,
        ssh_pkey=mypkey,
        remote_bind_address=(sql_hostname, sql_port)) as tunnel:

    engine = create_engine('mysql+pymysql://' + sql_username + ':' + sql_password + '@' + host + ':' + str(tunnel.local_bind_port) + '/' + sql_main_database)
    connection = engine.connect()
    print('engine creating...')

    sql = text("""select * from nurse_profiles np limit 50""")
    nurseData = connection.execute(sql)

    nurseList = []
    for row in nurseData:
        nurseList.append(dict(row))

    connection.close()

    print('nurseList len: ', len(nurseList))
    print('nurseList: ', nurseList)
Using an external Ubuntu server with an SSH key on DigitalOcean: the accepted answer did not work for me; I had to specify ssh_private_key, which is the path to your private key.
from sqlalchemy import create_engine
from sshtunnel import SSHTunnelForwarder

server = SSHTunnelForwarder(
    ('133.22.166.19', 22),
    ssh_password="123ABC123",
    ssh_username="erfan",
    ssh_private_key=r'C:\Users\Erfan\.ssh\id_rsa',
    remote_bind_address=('127.0.0.1', 3306)
)

server.start()

engine = create_engine(
    f'mysql+mysqldb://root:safepassword123@127.0.0.1:{server.local_bind_port}'
)

dbs = engine.execute('SHOW DATABASES;')
for db in dbs:
    print(db)

server.stop()