Psycopg2 access PostgreSQL database on remote host without manually opening ssh tunnel - python

My standard procedure for accessing a PostgreSQL database on a remote server is to first create an ssh tunnel as:
ssh username1@remote.somewhere.com -L 5432:localhost:5432 -p 222
and then run my query in python from another shell as:
conn = psycopg2.connect("host=localhost" +
                        " dbname=" + conf.dbname +
                        " user=" + conf.user +
                        " password=" + conf.password)
cur = conn.cursor()
cur.execute(query)
This piece of Python code works nicely once the tunnel is created. However, I would like psycopg2 to open the SSH tunnel itself, or to reach the remote database "somehow" without my having to redirect it to localhost first.
Is it possible to do this with psycopg2?
Is it otherwise possible to open the ssh tunnel from my Python code?
If I use:
os.system("ssh username1@remote.somewhere.com -L 5432:localhost:5432 -p 222")
the shell is redirected to the remote host, blocking the execution of the main thread.

You could also use sshtunnel, short and sweet:
from sshtunnel import SSHTunnelForwarder

PORT = 5432

with SSHTunnelForwarder((REMOTE_HOST, REMOTE_SSH_PORT),
                        ssh_username=REMOTE_USERNAME,
                        ssh_password=REMOTE_PASSWORD,
                        remote_bind_address=('localhost', PORT),
                        local_bind_address=('localhost', PORT)):
    conn = psycopg2.connect(...)

With the sshtunnel package
I was not familiar with SSH tunnels, so I had some difficulty using mrts's answer.
Maybe these clarifications will help someone.
In psycopg2.connect(), the host and port are the ones you just created by connecting to the remote host through the SSH tunnel.
Here is my code:
from sshtunnel import SSHTunnelForwarder

server = SSHTunnelForwarder((REMOTE_HOST, REMOTE_SSH_PORT),
                            ssh_username=REMOTE_USERNAME,
                            ssh_password=REMOTE_PASSWORD,
                            remote_bind_address=('localhost', PORT),
                            local_bind_address=('localhost', PORT))
server.start()

import psycopg2

conn = psycopg2.connect(
    database=DATABASE,
    user=USER,
    host=server.local_bind_host,
    port=server.local_bind_port,
    password=PWD)

cur = conn.cursor()
cur.execute("select * from yourtable limit 1;")
data = cur.fetchall()
print(data)
I hope this example makes it clearer.

Call your ssh via os.system in a separate thread/process. You can also use -N with ssh to avoid opening a remote shell.
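For instance, a minimal sketch of that idea (the connection string values are placeholders, and it assumes key-based ssh authentication so no password prompt appears):
import subprocess
import time
import psycopg2

# Launch the tunnel in the background; -N skips the remote shell so ssh only
# does the port forwarding, and Popen returns immediately instead of blocking
# the main thread the way os.system does.
tunnel = subprocess.Popen(
    ["ssh", "-N", "-L", "5432:localhost:5432",
     "-p", "222", "username1@remote.somewhere.com"]
)
try:
    time.sleep(2)  # crude wait for the forwarded port to come up
    conn = psycopg2.connect("host=localhost dbname=mydb user=me password=secret")
    # ... run your queries here ...
    conn.close()
finally:
    tunnel.terminate()  # tear the tunnel down when finished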

Clodoaldo Neto's code worked perfectly for me, but beware that it doesn't clean up the process afterward.
The method shown by Luca Fiaschi also works for me. I updated it a bit for Python 3 and the updated psutil module. The changes are just that process.username and process.cmdline are now methods and that the iterator is process_iter() instead of get_process_list().
Here is a very slightly modified version of the code Luca Fiaschi posted that works with Python 3 (requires the psutil module). I hope it is at least mostly correct!
#!/usr/bin/env python3

import psutil
import psycopg2
import subprocess
import time
import os

# Tunnel Config
SSH_HOST = "111.222.333.444"
SSH_USER = "user"
SSH_KEYFILE = "key.pem"
SSH_FOREIGN_PORT = 5432   # Port that postgres is running on at the foreign server
SSH_INTERNAL_PORT = 5432  # Port we open locally that is forwarded to
                          # FOREIGN_PORT on the server.

# Postgres Config
DB_HOST = "127.0.0.1"
DB_PORT = SSH_INTERNAL_PORT
DB_PASSWORD = "password"
DB_DATABASE = "postgres"
DB_USER = "user"


class SSHTunnel(object):
    """
    A context manager implementation of an ssh tunnel opened from python
    """
    def __init__(self, tunnel_command):
        assert "-fN" in tunnel_command, "need to open the tunnel with -fN"
        self._tunnel_command = tunnel_command
        self._delay = 0.1
        self.ssh_tunnel = None

    def create_tunnel(self):
        tunnel_cmd = self._tunnel_command
        ssh_process = subprocess.Popen(tunnel_cmd, universal_newlines=True,
                                       shell=True, stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT,
                                       stdin=subprocess.PIPE)
        # Assuming that the tunnel command has "-f" and "ExitOnForwardFailure=yes", the
        # command will return immediately so we can check the return status with a poll().
        while True:
            p = ssh_process.poll()
            if p is not None:
                break
            time.sleep(self._delay)
        if p == 0:
            # Unfortunately there is no direct way to get the pid of the spawned ssh
            # process, so we'll find it by looking for a matching process using psutil.
            current_username = psutil.Process(os.getpid()).username()
            ssh_processes = [proc for proc in psutil.process_iter()
                             if proc.cmdline() == tunnel_cmd.split()
                             and proc.username() == current_username]
            if len(ssh_processes) == 1:
                self.ssh_tunnel = ssh_processes[0]
                return ssh_processes[0]
            else:
                raise RuntimeError('multiple (or zero?) tunnel ssh processes found: ' + str(ssh_processes))
        else:
            raise RuntimeError('Error creating tunnel: ' + str(p) + ' :: ' + str(ssh_process.stdout.readlines()))

    def release(self):
        """ Get rid of the tunnel by killing the pid
        """
        if self.ssh_tunnel:
            self.ssh_tunnel.terminate()

    def __enter__(self):
        self.create_tunnel()
        return self

    def __exit__(self, type, value, traceback):
        self.release()

    def __del__(self):
        self.release()


command = "ssh -i %s %s@%s -fNL %d:localhost:%d" \
    % (SSH_KEYFILE, SSH_USER, SSH_HOST, SSH_INTERNAL_PORT, SSH_FOREIGN_PORT)

with SSHTunnel(command):
    conn = psycopg2.connect(host=DB_HOST, password=DB_PASSWORD,
                            database=DB_DATABASE, user=DB_USER, port=DB_PORT)
    curs = conn.cursor()
    sql = "select * from table"
    curs.execute(sql)
    rows = curs.fetchall()
    print(rows)

For the moment I am using a solution based on this gist:
import os


class SSHTunnel(object):
    """
    A context manager implementation of an ssh tunnel opened from python
    """
    def __init__(self, tunnel_command):
        assert "-fN" in tunnel_command, "need to open the tunnel with -fN"
        self._tunnel_command = tunnel_command
        self._delay = 0.1

    def create_tunnel(self):
        tunnel_cmd = self._tunnel_command
        import time, psutil, subprocess
        ssh_process = subprocess.Popen(tunnel_cmd, universal_newlines=True,
                                       shell=True,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT,
                                       stdin=subprocess.PIPE)
        # Assuming that the tunnel command has "-f" and "ExitOnForwardFailure=yes", the
        # command will return immediately so we can check the return status with a poll().
        while True:
            p = ssh_process.poll()
            if p is not None:
                break
            time.sleep(self._delay)
        if p == 0:
            # Unfortunately there is no direct way to get the pid of the spawned ssh
            # process, so we'll find it by looking for a matching process using psutil.
            current_username = psutil.Process(os.getpid()).username
            ssh_processes = [proc for proc in psutil.get_process_list()
                             if proc.cmdline == tunnel_cmd.split()
                             and proc.username == current_username]
            if len(ssh_processes) == 1:
                self.ssh_tunnel = ssh_processes[0]
                return ssh_processes[0]
            else:
                raise RuntimeError('multiple (or zero?) tunnel ssh processes found: ' + str(ssh_processes))
        else:
            raise RuntimeError('Error creating tunnel: ' + str(p) + ' :: ' + str(ssh_process.stdout.readlines()))

    def release(self):
        """ Get rid of the tunnel by killing the pid
        """
        self.ssh_tunnel.terminate()

    def __enter__(self):
        self.create_tunnel()
        return self

    def __exit__(self, type, value, traceback):
        self.release()

    def __del__(self):
        self.release()


def test():
    # do things that will fail if the tunnel is not opened
    print "done =========="

command = "ssh username@someserver.com -L %d:localhost:%d -p 222 -fN" % (someport, someport)

with SSHTunnel(command):
    test()
Please let me know if anybody has a better idea

from time import sleep

os.system("ssh username1@remote.somewhere.com -fNL 5432:localhost:5432 -p 222")

while True:
    try:
        conn = psycopg2.connect(
            "host=localhost dbname={0} user={1} password={2}".format(
                conf.dbname, conf.user, conf.password
            )
        )
        break
    except psycopg2.OperationalError:
        sleep(3)

Related

Is there a way to remotely port forward in Python 3? Ex: ssh jumpserver -L 8000:internal_server_name:8000 -N

I currently have a manual port-forwarding process using OS-level SSH forwarding, and I would like to do this through Python so that the connection closes automatically afterwards, similar to how 'with open' works for a file write.
I am currently using it like so:
ssh jumpserver -L 8000:internal_server_name:8000 -N
and calling the api locally like so:
http://localhost:8000/get-answer/
This method works but, again, I am looking for a with-style approach.
I tried this without luck:
remote_host = "internal_server_name"
remote_port = 8000
local_port = 8000
ssh_host = "jumpserver"
ssh_port = 22
user = "ubuntu"
pkey = "~/.ssh.id_rsa"

transport = paramiko.Transport((ssh_host, ssh_port))
# Command for paramiko-1.7.7.1
transport.connect(hostkey=None,
                  username=user,
                  password=None,
                  pkey=pkey)
try:
    forward_tunnel(local_port, remote_host, remote_port, transport)
except KeyboardInterrupt:
    print('Port forwarding stopped.')
    sys.exit(0)
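For reference, here is a minimal sketch of the with-style behaviour asked about above, using the sshtunnel package shown in the first answer on this page (the key path and the use of urllib are assumptions, not tested against the actual jumpserver):
from urllib.request import urlopen
from sshtunnel import SSHTunnelForwarder

# The tunnel is opened on entering the with-block and torn down on exit,
# mirroring `ssh jumpserver -L 8000:internal_server_name:8000 -N`.
with SSHTunnelForwarder(
        ("jumpserver", 22),
        ssh_username="ubuntu",
        ssh_pkey="~/.ssh/id_rsa",  # path to the private key (assumed)
        remote_bind_address=("internal_server_name", 8000),
        local_bind_address=("localhost", 8000)):
    with urlopen("http://localhost:8000/get-answer/") as response:
        print(response.status)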

How to run commands in REMOTE machine with the subprocess approach using passwordless authentication in python3?

I am trying to write a script that performs some commands (defined by payload) on an SSH-enabled remote computer. I want a passwordless connection, so that I can use public/private key authentication. I know how to do it in paramiko and it works. Is there any way to do it with subprocess and get the output? Is there any sample code for that?
My sample code is something like this. For example, I want to make more connections later on.
import subprocess


def __init__(type, options):
    if type == "ssh":
        ssh(options)
    elif type == "fsexec":
        fsexec(options)


def ssh(self, ip, user, sshkey_file, payload):
    try:
        command = "ssh "
        prog = subprocess.call(["ssh -i sshkey_file -t user@ip 'payload'"])
        print(prog)
        print("Returncode:", prog)
    except Exception as e:
        print(e)


def fsexec(self, ip, user, sshkey_file, payload):
    try:
        command = "ssh "
        prog = subprocess.call(["fsexec -t user@ip 'payload'"])
        print(prog)
        print("Returncode:", prog)
    except Exception as e:
        print(e)
You should use the Paramiko library to log in with ssh and the keyfile.
I copied an example from a gist (https://gist.github.com/batok/2352501):
import paramiko

k = paramiko.RSAKey.from_private_key_file("/Users/whatever/Downloads/mykey.pem")
c = paramiko.SSHClient()
c.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "connecting"
c.connect(hostname="www.acme.com", username="ubuntu", pkey=k)
print "connected"
commands = ["/home/ubuntu/firstscript.sh", "/home/ubuntu/secondscript.sh"]
for command in commands:
    print "Executing {}".format(command)
    stdin, stdout, stderr = c.exec_command(command)
    print stdout.read()
    print("Errors")
    print stderr.read()
c.close()

Paramiko - python SSH - multiple command under a single channel

I have read other Stack Overflow threads on this. Those are older posts; I would like to get the latest update.
Is it possible to send multiple commands over a single channel in Paramiko, or is it still not possible?
If it is still not possible, is there any other library that can do this?
Example scenario, automating Cisco router configuration: the user needs to first enter "config t" before entering the other commands. That is currently not possible in paramiko.
Thanks.
If you are planning to use the exec_command() method provided by the paramiko API, you are limited to sending only a single command at a time; as soon as the command has been executed, the channel is closed.
The excerpt below is from the Paramiko API docs:
exec_command(self, command)
Execute a command on the server. If the server allows it, the channel will then be directly connected to the stdin, stdout, and stderr of the command being executed.
When the command finishes executing, the channel will be closed and can't be reused. You must open a new channel if you wish to execute another command.
But since the transport is also a form of socket, you can send commands without using the exec_command() method, using barebones socket programming.
In case you have a defined set of commands, both pexpect and Exscript can be used, where you read a set of commands from a file and send them across the channel (a minimal pexpect sketch follows below).
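For illustration, a rough pexpect sketch of that read-commands-from-a-file approach (the host, password, prompt patterns and commands file are made-up placeholders, not tested against real Cisco gear):
import pexpect

# Spawn an interactive ssh session and drive it the way a user would.
child = pexpect.spawn("ssh admin@192.0.2.1", timeout=10)
child.expect("assword:")   # password prompt (pattern is an assumption)
child.sendline("secret")
child.expect("#")          # device prompt (assumption)

# Read a defined set of commands from a file and send them over the same channel.
with open("commands.txt") as f:
    for command in f:
        child.sendline(command.strip())
        child.expect("#")  # wait for the prompt before sending the next command
        print(child.before.decode())

child.sendline("exit")
child.close()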
See my answer here or this page
import threading
import time
import paramiko

strdata = ''
fulldata = ''


class ssh:
    shell = None
    client = None
    transport = None

    def __init__(self, address, username, password):
        print("Connecting to server on ip", str(address) + ".")
        self.client = paramiko.client.SSHClient()
        self.client.set_missing_host_key_policy(paramiko.client.AutoAddPolicy())
        self.client.connect(address, username=username, password=password, look_for_keys=False)
        self.transport = paramiko.Transport((address, 22))
        self.transport.connect(username=username, password=password)
        thread = threading.Thread(target=self.process)
        thread.daemon = True
        thread.start()

    def closeConnection(self):
        if self.client is not None:
            self.client.close()
            self.transport.close()

    def openShell(self):
        self.shell = self.client.invoke_shell()

    def sendShell(self, command):
        if self.shell:
            self.shell.send(command + "\n")
        else:
            print("Shell not opened.")

    def process(self):
        global strdata, fulldata
        while True:
            # Print data when available
            if self.shell is not None and self.shell.recv_ready():
                alldata = self.shell.recv(1024)
                while self.shell.recv_ready():
                    alldata += self.shell.recv(1024)
                strdata = strdata + str(alldata)
                fulldata = fulldata + str(alldata)
                strdata = self.print_lines(strdata)  # print all received data except last line

    def print_lines(self, data):
        last_line = data
        if '\n' in data:
            lines = data.splitlines()
            for i in range(0, len(lines) - 1):
                print(lines[i])
            last_line = lines[len(lines) - 1]
            if data.endswith('\n'):
                print(last_line)
                last_line = ''
        return last_line


sshUsername = "SSH USERNAME"
sshPassword = "SSH PASSWORD"
sshServer = "SSH SERVER ADDRESS"

connection = ssh(sshServer, sshUsername, sshPassword)
connection.openShell()
connection.sendShell('cmd1')
connection.sendShell('cmd2')
connection.sendShell('cmd3')
time.sleep(10)
print(strdata)   # print the last line of received data
print('==========================')
print(fulldata)  # This contains the complete data received.
print('==========================')
connection.closeConnection()
Have a look at parallel-ssh:
from pssh.pssh2_client import ParallelSSHClient

cmds = ['my cmd1', 'my cmd2']
hosts = ['myhost']
client = ParallelSSHClient(hosts)

for cmd in cmds:
    output = client.run_command(cmd)
    # Wait for completion
    client.join(output)
Single client, multiple commands over same SSH session and optionally multiple hosts in parallel - also non-blocking.
I find this simple to understand and use. The code provides two examples, single-host and multi-host. There is also an example where you log in as a second user and continue your commands as that user.
More info can be found here: https://parallel-ssh.readthedocs.io/en/latest/advanced.html?highlight=channel#interactive-shells
from pssh.clients import SSHClient
from pssh.exceptions import Timeout
from pssh.clients import ParallelSSHClient
from pssh.config import HostConfig


def singleHost():
    host_ = "10.3.0.10"
    pwd_ = "<pwd>"
    pwd_root = "<root pwd>"
    user_ = "<user>"

    client = SSHClient(host_, user=user_, password=pwd_, timeout=4, num_retries=1)
    #####
    shell = client.open_shell(read_timeout=2)
    shell.run("whoami")
    # login as new user example
    shell.run("su - root")
    shell.stdin.write(pwd_root + "\n")
    shell.stdin.flush()
    shell.run("pwd")

    try:
        # Reading partial shell output, with 'timeout' > client.open_shell(read_timeout=2)
        for line in shell.stdout:
            print(line)
    except Timeout:
        pass

    shell.run("whoami")
    shell.run("cd ..")
    print(".......")

    try:
        # Reading partial shell output, with 'timeout' > client.open_shell(read_timeout=2)
        for line in shell.stdout:
            print(line)
    except Timeout:
        pass

    shell.close()


def multiHost():
    pwd_ = "<pwd>"
    user_ = "<user>"
    workingIP_list = ["10.3.0.10", "10.3.0.10"]

    host_config_ = []
    # HostConfig is needed, one per entry in 'workingIP_list'
    host_config_.append(HostConfig(user=user_, password=pwd_))
    host_config_.append(HostConfig(user=user_, password=pwd_))

    client_ = ParallelSSHClient(workingIP_list, host_config=host_config_, num_retries=1, timeout=3)

    # now you have an open shell
    shells = client_.open_shell(read_timeout=2)
    command = "pwd"
    client_.run_shell_commands(shells, command)

    try:
        # Reading partial shell output, with 'timeout' > client_.open_shell(read_timeout=2)
        for line in shells[0].stdout:
            print(line)
    except Timeout:
        pass

    print(".......")
    command = "cd repo/"
    client_.run_shell_commands(shells, command)
    command = "pwd"
    client_.run_shell_commands(shells, command)

    # Joined shells are closed and may not run any further commands.
    client_.join_shells(shells)

    for shell in shells:
        for line in shell.stdout:
            print(line)
        print(shell.exit_code)


if __name__ == '__main__':
    print("singleHost example:")
    singleHost()
    print("multiHost example:")
    multiHost()

Paramiko hangs after connecting to custom shell

I have the following script to connect to a custom ssh shell.
When I execute the script, it just hangs. It doesn't execute the command. I suspect problems with the shell, because it does not have any prompt. Do you have any idea?
import sys
import os
import paramiko
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect('10.115.130.22', username='admin', password='xxx', timeout = 30)
stdin, stdout, stderr = ssh.exec_command('xconfiguration SystemUnit Name: devicename')
print stdout.readlines()
ssh.close()
I spent way too much time on this problem. I found that I needed to use the invoke_shell() to be able to get anything past the greeting banner on the Tandberg C/E series video endpoints. Here's my working code, FWIW:
import time
import paramiko

command = 'help'
host = 'x.x.x.x'
port = 22
user = 'admin'
passwd = 'TANDBERG'


def tbgShell(host, port, username, password, cmd):
    """send an arbitrary command to a Cisco/TBG gizmo's ssh and
    get the result"""
    transport = paramiko.Transport((host, port))
    transport.connect(username=user, password=passwd)
    chan = transport.open_channel("session")
    chan.setblocking(0)
    chan.invoke_shell()
    out = ''
    chan.send(cmd + '\n')
    tCheck = 0
    while not chan.recv_ready():
        time.sleep(1)
        tCheck += 1
        if tCheck >= 6:
            print 'time out'  # TODO: add exception here
            return False
    out = chan.recv(1024)
    return out

output = tbgShell(host, port, user, passwd, command)
print output
This is a custom shell. It is a Cisco EX90 video conferencing system.
But I tried different commands like xconfig, which shows you the config.

Perform commands over ssh with Python

I'm writing a script to automate some command line commands in Python. At the moment, I'm doing calls like this:
cmd = "some unix command"
retcode = subprocess.call(cmd,shell=True)
However, I need to run some commands on a remote machine. Manually, I would log in using ssh and then run the commands. How would I automate this in Python? I need to log in with a (known) password to the remote machine, so I can't just use cmd = ssh user@remotehost. I'm wondering if there's a module I should be using?
I will refer you to paramiko
see this question
ssh = paramiko.SSHClient()
ssh.connect(server, username=username, password=password)
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmd_to_execute)
If you are using ssh keys, do:
k = paramiko.RSAKey.from_private_key_file(keyfilename)
# OR k = paramiko.DSSKey.from_private_key_file(keyfilename)
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname=host, username=user, pkey=k)
Keep it simple. No libraries required.
import subprocess
# Python 2
subprocess.Popen("ssh {user}@{host} {cmd}".format(user=user, host=host, cmd='ls -l'), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
# Python 3
subprocess.Popen(f"ssh {user}@{host} {cmd}", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
Or you can just use commands.getstatusoutput:
commands.getstatusoutput("ssh machine 1 'your script'")
I used it extensively and it works great.
In Python 2.6+, use subprocess.check_output.
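For example, something along these lines (the host and command are placeholders):
import subprocess

# Runs the remote command over ssh and returns its stdout;
# raises CalledProcessError on a non-zero exit status.
output = subprocess.check_output(["ssh", "user@remotehost", "ls -l"])
print(output.decode())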
I found paramiko to be a bit too low-level, and Fabric not especially well-suited to being used as a library, so I put together my own library called spur that uses paramiko to implement a slightly nicer interface:
import spur
shell = spur.SshShell(hostname="localhost", username="bob", password="password1")
result = shell.run(["echo", "-n", "hello"])
print result.output # prints hello
If you need to run inside a shell:
shell.run(["sh", "-c", "echo -n hello"])
All have already stated (and recommended) using paramiko, so I am just sharing some Python code (an API, one may say) that will allow you to execute multiple commands in one go.
To execute commands on a different node, use: Commands().run_cmd(host_ip, list_of_commands)
You will see one TODO, which I have kept to stop the execution if any of the commands fails to execute. I don't know how to do that, so please share your knowledge.
#!/usr/bin/python

import os
import sys
import select
import paramiko
import time


class Commands:
    def __init__(self, retry_time=0):
        self.retry_time = retry_time
        pass

    def run_cmd(self, host_ip, cmd_list):
        i = 0
        while True:
            # print("Trying to connect to %s (%i/%i)" % (self.host, i, self.retry_time))
            try:
                ssh = paramiko.SSHClient()
                ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                ssh.connect(host_ip)
                break
            except paramiko.AuthenticationException:
                print("Authentication failed when connecting to %s" % host_ip)
                sys.exit(1)
            except:
                print("Could not SSH to %s, waiting for it to start" % host_ip)
                i += 1
                time.sleep(2)

            # If we could not connect within time limit
            if i >= self.retry_time:
                print("Could not connect to %s. Giving up" % host_ip)
                sys.exit(1)

        # After connection is successful
        # Send the commands
        for command in cmd_list:
            # print command
            print "> " + command
            # execute commands
            stdin, stdout, stderr = ssh.exec_command(command)
            # TODO() : if an error is thrown, stop further rules and revert back changes
            # Wait for the command to terminate
            while not stdout.channel.exit_status_ready():
                # Only print data if there is data to read in the channel
                if stdout.channel.recv_ready():
                    rl, wl, xl = select.select([stdout.channel], [], [], 0.0)
                    if len(rl) > 0:
                        tmp = stdout.channel.recv(1024)
                        output = tmp.decode()
                        print output

        # Close SSH connection
        ssh.close()
        return


def main(args=None):
    if args is None:
        print "arguments expected"
    else:
        # args = {'<ip_address>', <list_of_commands>}
        mytest = Commands()
        mytest.run_cmd(host_ip=args[0], cmd_list=args[1])
    return


if __name__ == "__main__":
    main(sys.argv[1:])
paramiko finally worked for me after adding an additional line, which is a really important one (line 3):
import paramiko
p = paramiko.SSHClient()
p.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # This script doesn't work for me unless this line is added!
p.connect("server", port=22, username="username", password="password")
stdin, stdout, stderr = p.exec_command("your command")
opt = stdout.readlines()
opt = "".join(opt)
print(opt)
Make sure that the paramiko package is installed.
Original source of the solution: Source
The accepted answer didn't work for me, here's what I used instead:
import paramiko
import os
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# ssh.load_system_host_keys()
ssh.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
ssh.connect("d.d.d.d", username="user", password="pass", port=22222)
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command("ls -alrt")
exit_code = ssh_stdout.channel.recv_exit_status() # handles async exit error
for line in ssh_stdout:
    print(line.strip())
total 44
-rw-r--r--. 1 root root 129 Dec 28 2013 .tcshrc
-rw-r--r--. 1 root root 100 Dec 28 2013 .cshrc
-rw-r--r--. 1 root root 176 Dec 28 2013 .bashrc
...
Alternatively, you can use sshpass:
import subprocess
cmd = """ sshpass -p "myPas$" ssh user@d.d.d.d -p 22222 'my command; exit' """
print( subprocess.getoutput(cmd) )
References:
https://github.com/onyxfish/relay/issues/11
https://stackoverflow.com/a/61016663/797495
Notes:
Just make sure to connect manually at least once to the remote system via ssh (ssh root@ip) and accept the public key; this is often the reason for not being able to connect using paramiko or other automated ssh scripts.
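If you want to avoid that manual first connection, one possible sketch (assuming ssh-keyscan is installed on the client) is to pre-populate known_hosts yourself:
import subprocess
from pathlib import Path

# Fetch the server's host key (port 22222 as in the example above) and append
# it to known_hosts so load_host_keys()/connect() can verify the host.
known_hosts = Path.home() / ".ssh" / "known_hosts"
keys = subprocess.check_output(["ssh-keyscan", "-p", "22222", "d.d.d.d"])
with open(known_hosts, "ab") as f:
    f.write(keys)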
I have used paramiko a bunch (nice) and pxssh (also nice). I would recommend either. They work a little differently but have a relatively large overlap in usage.
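For completeness, a small pxssh sketch (pxssh ships with pexpect; host and credentials are placeholders):
from pexpect import pxssh

# pxssh logs in, waits for the shell prompt, and lets you send several
# commands over the same session.
s = pxssh.pxssh()
s.login("remotehost", "user", "password")
s.sendline("uptime")
s.prompt()                # wait for the prompt after the command
print(s.before.decode())  # everything the command printed before the prompt
s.sendline("ls -l")
s.prompt()
print(s.before.decode())
s.logout()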
First: I'm surprised that no one has mentioned fabric yet.
Second: For exactly the requirements you describe I've implemented my own Python module named jk_simpleexec. Its purpose: making running commands easy.
Let me explain a little bit about it for you.
The 'executing a command locally' problem
My python module jk_simpleexec provides a function named runCmd(..) that can execute a shell (!) command locally or remotely. This is very simple. Here is an example for local execution of a command:
import jk_simpleexec
cmdResult = jk_simpleexec.runCmd(None, "cd / ; ls -la")
NOTE: Be aware that the returned data is trimmed automatically by default to remove excessive empty lines from STDOUT and STDERR. (Of course this behavior can be deactivated, but for the purpose you have in mind, exactly that behavior is what you will want.)
The 'processing the result' problem
What you will receive is an object that contains the return code, STDOUT and STDERR. Therefore it's very easy to process the result.
And this is what you want to do, as the command you execute might exist and be launched but still fail to do what it is intended to do. In the simplest case, where you're not interested in STDOUT and STDERR, your code will likely look something like this:
cmdResult.raiseExceptionOnError("Something went wrong!", bDumpStatusOnError=True)
For debugging purposes you want to output the result to STDOUT at some time, so for this you can do just this:
cmdResult.dump()
If you would want to process STDOUT it's simple as well. Example:
for line in cmdResult.stdOutLines:
    print(line)
The 'executing a command remotely' problem
Now of course we might want to execute this command remotely on another system. For this we can use the same function runCmd(..) in exactly the same way but we need to specify a fabric connection object first. This can be done like this:
from fabric import Connection
REMOTE_HOST = "myhost"
REMOTE_PORT = 22
REMOTE_LOGIN = "mylogin"
REMOTE_PASSWORD = "mypwd"
c = Connection(host=REMOTE_HOST, user=REMOTE_LOGIN, port=REMOTE_PORT, connect_kwargs={"password": REMOTE_PASSWORD})
cmdResult = jk_simpleexec.runCmd(c, "cd / ; ls -la")
# ... process the result stored in cmdResult ...
c.close()
Everything remains exactly the same, but this time we run this command on another host. This is intended: I wanted to have a uniform API where there are no modifications required in the software if you at some time decide to move from the local host to another host.
The password input problem
Now of course there is the password problem. This has been mentioned above by some users: We might want to ask the user executing this python code for a password.
For this problem I created my own module quite some time ago: jk_pwdinput. The difference from regular password input is that jk_pwdinput will output stars instead of printing nothing, so for every password character you type you will see a star. This way it's easier for you to enter a password.
Here is the code:
import jk_pwdinput
# ... define other 'constants' such as REMOTE_LOGIN, REMOTE_HOST ...
REMOTE_PASSWORD = jk_pwdinput.readpwd("Password for " + REMOTE_LOGIN + "@" + REMOTE_HOST + ": ")
(For completeness: If readpwd(..) returned None the user canceled the password input with Ctrl+C. In a real world scenario you might want to act on this appropriately.)
Full example
Here is a full example:
import jk_simpleexec
import jk_pwdinput
from fabric import Connection
REMOTE_HOST = "myhost"
REMOTE_PORT = 22
REMOTE_LOGIN = "mylogin"
REMOTE_PASSWORD = jk_pwdinput.readpwd("Password for " + REMOTE_LOGIN + "@" + REMOTE_HOST + ": ")
c = Connection(host=REMOTE_HOST, user=REMOTE_LOGIN, port=REMOTE_PORT, connect_kwargs={"password": REMOTE_PASSWORD})
cmdResult = jk_simpleexec.runCmd(
    c=c,
    command="cd / ; ls -la"
)
cmdResult.raiseExceptionOnError("Something went wrong!", bDumpStatusOnError=True)
c.close()
Final notes
So we have the full set:
Executing a command,
executing that command remotely via the same API,
creating the connection in an easy and secure way with password input.
The code above solves the problem quite well for me (and hopefully for you as well). And everything is open source: Fabric is BSD-2-Clause, and my own modules are provided under Apache-2.
Modules used:
fabric : http://www.fabfile.org/
jk_pwdinput : https://github.com/jkpubsrc/python-module-jk-pwdinput
jk_simpleexec : https://github.com/jkpubsrc/python-module-jk-simpleexec
Happy coding! ;-)
Works Perfectly...
import paramiko
import time

ssh = paramiko.SSHClient()
# ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect('10.106.104.24', port=22, username='admin', password='')
time.sleep(5)
print('connected')
stdin, stdout, stderr = ssh.exec_command(" ")


def execute():
    stdin.write('xcommand SystemUnit Boot Action: Restart\n')
    print('success')

execute()
You can use either of these commands; this also lets you supply a password.
cmd = subprocess.run(["sshpass -p 'password' ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@domain.com ps | grep minicom"], shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
print(cmd.stdout)
OR
cmd = subprocess.getoutput("sshpass -p 'password' ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null root@domain.com ps | grep minicom")
print(cmd)
Have a look at spurplus, a wrapper we developed around spur that provides type annotations and some minor gimmicks (reconnecting SFTP, md5 etc.): https://pypi.org/project/spurplus/
Asking the user to enter the command as per the device they are logging in to.
The code below was validated by PEP8online.com.
import paramiko
import xlrd
import time

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

loc = ('/Users/harshgow/Documents/PYTHON_WORK/labcred.xlsx')
wo = xlrd.open_workbook(loc)
sheet = wo.sheet_by_index(0)
Host = sheet.cell_value(0, 1)
Port = int(sheet.cell_value(3, 1))
User = sheet.cell_value(1, 1)
Pass = sheet.cell_value(2, 1)


def details(Host, Port, User, Pass):
    time.sleep(2)
    ssh.connect(Host, Port, User, Pass)
    print('connected to ip ', Host)
    stdin, stdout, stderr = ssh.exec_command("")
    x = input('Enter the command:')
    stdin.write(x)
    stdin.write('\n')
    print('success')

details(Host, Port, User, Pass)
# Reading the Host, username, password, port from excel file
import paramiko
import xlrd

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

loc = ('/Users/harshgow/Documents/PYTHON_WORK/labcred.xlsx')
wo = xlrd.open_workbook(loc)
sheet = wo.sheet_by_index(0)
Host = sheet.cell_value(0, 1)
Port = int(sheet.cell_value(3, 1))
User = sheet.cell_value(1, 1)
Pass = sheet.cell_value(2, 1)


def details(Host, Port, User, Pass):
    ssh.connect(Host, Port, User, Pass)
    print('connected to ip ', Host)
    stdin, stdout, stderr = ssh.exec_command("")
    stdin.write('xcommand SystemUnit Boot Action: Restart\n')
    print('success')

details(Host, Port, User, Pass)
The most modern approach is probably to use fabric. This module allows you to set up an SSH connection and then run commands and get their results over the connection object.
Here's a simple example:
from fabric import Connection

with Connection("your_hostname") as connection:
    result = connection.run("uname -s", hide=True)
    msg = "Ran {0.command!r} on {0.connection.host}, got stdout:\n{0.stdout}"
    print(msg.format(result))
I wrote a simple class to run commands on a remote host over native ssh, using the subprocess module:
Usage
from ssh_utils import SshClient

client = SshClient(user='username', remote='remote_host', key_path='path/to/key.pem')

# run a list of commands
client.cmd(['mkdir ~/testdir', 'ls -la', 'echo done!'])

# copy files/dirs
client.scp('my_file.txt', '~/testdir')
Class source code
https://gist.github.com/mamaj/a7b378a5c969e3e32a9e4f9bceb0c5eb
import subprocess
from pathlib import Path
from typing import Union


class SshClient():
    """ Perform commands and copy files over ssh using subprocess
    and the native ssh client (OpenSSH).
    """

    def __init__(self,
                 user: str,
                 remote: str,
                 key_path: Union[str, Path]) -> None:
        """
        Args:
            user (str): username for the remote
            remote (str): remote host IP/DNS
            key_path (str or pathlib.Path): path to .pem file
        """
        self.user = user
        self.remote = remote
        self.key_path = str(key_path)

    def cmd(self,
            cmds: list[str],
            strict_host_key_checking=False) -> None:
        """Runs commands consecutively, ensuring the success of each
        one before calling the next.
        Args:
            cmds (list[str]): list of commands to run.
            strict_host_key_checking (bool, optional): Defaults to False.
        """
        strict_host_key_checking = 'yes' if strict_host_key_checking \
            else 'no'
        cmd = ' && '.join(cmds)
        subprocess.run(
            [
                'ssh',
                '-i', self.key_path,
                '-o', f'StrictHostKeyChecking={strict_host_key_checking}',
                '-o', 'UserKnownHostsFile=/dev/null',
                f'{self.user}@{self.remote}',
                cmd
            ]
        )

    def scp(self, source: Union[str, Path], destination: Union[str, Path]):
        """Copies `source` file to remote `destination` using the
        native `scp` command.
        Args:
            source (Union[str, Path]): Source file path.
            destination (Union[str, Path]): Destination path on remote.
        """
        subprocess.run(
            [
                'scp',
                '-i', self.key_path,
                str(source),
                f'{self.user}@{self.remote}:{str(destination)}',
            ]
        )
Below is an example, in case you want user input for the hostname, username, password, and port number.
import paramiko

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())


def details():
    Host = input("Enter the Hostname: ")
    Port = input("Enter the Port: ")
    User = input("Enter the Username: ")
    Pass = input("Enter the Password: ")
    ssh.connect(Host, Port, User, Pass, timeout=2)
    print('connected')
    stdin, stdout, stderr = ssh.exec_command("")
    stdin.write('xcommand SystemUnit Boot Action: Restart\n')
    print('success')

details()
