SSH connection not persistent - python

I have the following script which SSHes into a network server and executes some commands. For some reason the SSH connection opens, but by the time the commands are executed it closes (I think); as a result the commands fail with the error below. Can anyone provide info on how to make the SSH connection persistent?
#!/usr/bin/python
import os
import sys
import json
import fileinput
import pwd
from subprocess import Popen, PIPE, STDOUT
import re
import paramiko
import MySQLdb
resource = r'qca-cdit-01'
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(resource, username='username', password='passwordname')
#chan = ssh.get_transport().open_session()
chan = ssh.invoke_shell()
chan.get_pty()
commandstringlist = \
['/local/mnt/workspace/LA_host_builds/AU_LINUX_ANDROID_LA.BF64.1.2.1_RB2.05.01.01.081.031_msm8992',
'cd frameworks/base',
'git fetch ssh://cdit@review-android.company.com:29418/platform/frameworks/base refs/changes/21/1260821/2 && git cherry-pick FETCH_HEAD']
for cmd_val in commandstringlist:
    #chan.exec_command(cmd_val)
    chan.send(cmd_val)
    print(chan.recv(1024))
error:
Traceback (most recent call last):
  File "ssh_test.py", line 21, in <module>
    chan.get_pty()
  File "/usr/local/lib/python2.7/dist-packages/paramiko/channel.py", line 60, in _check
    return func(self, *args, **kwds)
  File "/usr/local/lib/python2.7/dist-packages/paramiko/channel.py", line 177, in get_pty
    self._wait_for_event()
  File "/usr/local/lib/python2.7/dist-packages/paramiko/channel.py", line 1086, in _wait_for_event
    raise e
paramiko.ssh_exception.SSHException: Channel closed

Every command you execute using exec_command has a channel of its own and therefore a context of its own. That context includes the working directory. You change the working directory in one context and then try to use it in another. Instead, use the same channel for all the commands. You can either open a channel and use it, or just issue all the commands at once.
commandstringlist = ['cd /local/mnt/workspace/test2 && cd data/log && git fetch ssh://username@review-android.company.com:29418/platform/data/log refs/changes/21/1260821/2 && git cherry-pick FETCH_HEAD']
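For example, once the steps are chained into one string like this, a single exec_command call runs them all in the same remote shell, so the cd carries over to the git commands (a minimal sketch reusing the ssh client from the question):
# One channel, one shell context for the whole chained command.
stdin, stdout, stderr = ssh.exec_command(commandstringlist[0])
print(stdout.read())
print(stderr.read())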
Here are a few other questions that should explain this in more detail.
https://unix.stackexchange.com/questions/80821/why-does-cd-command-not-work-via-ssh
https://superuser.com/questions/46851/keeping-working-directory-across-ssh
https://stackoverflow.com/a/6770272/492773

The trick is to change to the directory before executing the command. You can easily integrate it into your execute_command:
def execute_command(cmd, pwd=None):
    if pwd:
        cmd = 'cd "%s";%s' % (pwd, cmd)
    print cmd
    si, so, se = ssh.exec_command(cmd)
    print os.getcwd()
    print "printing so"
    soreadList = so.readlines()
    print soreadList
    print "printing se"
    errList = se.readlines()
    print errList
The command is run by a shell on the remote machine, so any shell tricks like setting environment variables could be added also.
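For instance, an environment variable can be prefixed to the command string just like the directory change (a sketch; the variable and build command are made up):
# Hypothetical call: runs  cd "/local/mnt/workspace/test2"; BUILD_ID=42 make -j4
# in a single remote shell, so both the working directory and the variable apply.
execute_command('BUILD_ID=42 make -j4', pwd='/local/mnt/workspace/test2')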

Simply do:
#!/usr/bin/python
import os
import sys
import json
import fileinput
import pwd
from subprocess import Popen, PIPE, STDOUT
import re
import paramiko
import MySQLdb
resource = r'qca-cdit-01'
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(resource, username='username', password='passwordname')
#chan = ssh.get_transport().open_session()
chan = ssh.invoke_shell()
commandstringlist = \
['\n/local/mnt/workspace/LA_host_builds/AU_LINUX_ANDROID_LA.BF64.1.2.1_RB2.05.01.01.081.031_msm8992\n',
'\ncd frameworks/base\n',
'\ngit fetch ssh://cdit@review-android.company.com:29418/platform/frameworks/base refs/changes/21/1260821/2 && git cherry-pick FETCH_HEAD\n']
for cmd_val in commandstringlist:
    #chan.exec_command(cmd_val)
    chan.send(cmd_val)
    print(chan.recv(8192))
which worked for me. (You forgot that .send() doesn't append newlines automatically.)

Can anyone provide info how to make the SSH connection persistent?
The following is an example of how to make an SSH connection persistent: it tries to execute a command and, if that fails, reconnects and retries the command.
import paramiko

class sshConnection:
    def __init__(self, host, un, pw):
        self.host = host
        self.un = un
        self.pw = pw

    def connect(self):
        self.ssh = paramiko.SSHClient()
        self.ssh.load_system_host_keys()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.ssh.connect(self.host, username=self.un, password=self.pw)

    def cmd(self, cmd, tries=0):
        self.si, self.so, self.se = None, None, None
        try:
            self.si, self.so, self.se = self.ssh.exec_command(cmd)
        except Exception, e:
            if tries > 3:
                raise
            self.connect()
            self.cmd(cmd, tries + 1)
        return self.si, self.so, self.se

conn = sshConnection("host", "username", "password")
si, out, err = conn.cmd("ls -al")
print "\n".join(map(lambda x: x.strip(), out.readlines()))

Related

Python spur: how to read output of long-running command

I'm trying to use the spur library to launch a long-running command via ssh, then read and process its output one line at a time. The documentation says you can pass a file object using stdout=f and run/spawn will call stdout.write for anything the subprocess writes to its stdout stream. I hit on the idea of creating an os.pipe() to make this work, but it doesn't. Can someone please suggest a fix?
NOTE: I've already got this working with paramiko.SSHClient.exec_command but the interface is a bit low-level for my needs, so I want to learn how to do it with spur. Thanks!
import spur
import os
HOST = "rocky.lan"
USER = "rocky"
CMD = "while sleep 1; do date; done"
r, w = os.pipe()
r = os.fdopen(r, 'rb')
w = os.fdopen(w, 'wb')
ssh = spur.SshShell(hostname=HOST, username=USER)
child = ssh.spawn(CMD, stdout=w)
for line in iter(r.readline, ""):
    print(line, end="")
Since someone is bound to ask, the paramiko code looks like this:
from paramiko import SSHClient
HOST = "rocky.lan"
USER = "rocky"
CMD = "while sleep 1; do date; done"
ssh = SSHClient()
ssh.load_system_host_keys()
ssh.connect(HOST, username=USER)
stdin, stdout, stderr = ssh.exec_command(CMD)
for line in iter(stdout.readline, ""):
    print(line, end="")
I've discovered parallel-ssh which seems to have parted company from paramiko and gone for python-ssh/python-ssh2 instead. A 5-minute test suggests that it combines paramiko's power with spur's simplicity, but sadly still doesn't support ~/.ssh/config, so Perl's Net::OpenSSH is still my favourite :-) Here's the code I got working with pssh:-
from pssh.clients import SSHClient
HOST = "rocky.lan"
USER = "rocky"
CMD = "while sleep 1; do date; done"
ssh = SSHClient(host=HOST, user=USER)
cmd = ssh.run_command(CMD)
for line in cmd.stdout:
    print(line)
So this is an alternative, but really I still need to know how to read the subprocess's stdout using spur.
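One sketch that follows from the documented behaviour quoted above (spur calling stdout.write() on whatever object you pass) is to hand spawn() a small file-like object that buffers chunks and processes complete lines; this is untested against spur and only illustrates the idea:
import spur

HOST = "rocky.lan"
USER = "rocky"
CMD = "while sleep 1; do date; done"

class LineHandler(object):
    """File-like object; spur calls write() with whatever the command prints."""
    def __init__(self):
        self._buf = ""

    def write(self, data):
        if isinstance(data, bytes):  # spur may hand over bytes
            data = data.decode("utf-8", "replace")
        self._buf += data
        while "\n" in self._buf:
            line, self._buf = self._buf.split("\n", 1)
            print(line)  # process one complete line at a time

ssh = spur.SshShell(hostname=HOST, username=USER)
child = ssh.spawn(CMD, stdout=LineHandler())
child.wait_for_result()  # blocks; output is streamed to LineHandler meanwhile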

ssh to a device with super user with paramiko [duplicate]

I am trying to make a script to SSH to my device as a read-only user, then switch to the super user and execute a command. Below are the steps I did in PuTTY:
log in as admin (r/o user) and password.
sudo su and then password.
execute a command.
print output of the executed command.
I tried the code below to make it work, but I didn't get any output.
import paramiko
import pandas as pd
import openpyxl
from paramiko import AuthenticationException
from paramiko.ssh_exception import SSHException, NoValidConnectionsError
import socket
import time
import os
import inspect
import datetime
start = time.time()
print("Starting................................Please Wait")
df=pd.DataFrame(columns=["ip","Status","Remarks"])
ips = ['10.11.8.71']
root = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
#ips = open (root+ "/440_ip.txt")
for ipp in ips:
    ip = ipp.strip()
    port = 22
    username = 'admin'
    #password='AeGEBUx66m_1ND'
    #cmd='interface wireless set [ find default-name=wlan1 ] ampdu-priorities=0,1,2,3,4,5,6,7 rate-set=configured rx-chains=0,1 scan-list=5825-5875 security-profile=iB440 ssid=iBw supported-rates-a/g="" basic-rates-a/g=""'
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(ip, port, username, timeout=5, password='HeWGEUx66m=_4!ND')
    stdin, stdout, stderr = ssh.exec_command('sudo bash', get_pty=True)
    time.sleep(0.1)
    stdin.write('HeWGEUx66m=_4!ND\n')
    stdin.flush()
    stdin.write('whoami\n')
    #time.sleep(1)
    stdin.flush()
    dd = stdout.readlines()
    print(dd)
    ssh.close()
After running the code there is no error; it seems to get stuck in some loop:
Starting................................Please Wait
Using ssh with plink in a single-line command works:
C:\Users\Administrator\AppData\Local\Programs\Python\Python38>plink.exe -ssh -t admin@10.11.8.71 sudo bash
admin@10.11.8.71's password:
Access granted. Press Return to begin session.
Password:
bash-3.2# whoami
root
bash-3.2#
To make it work properly I used channels from paramiko, and it worked like a charm.
import paramiko
from paramiko import *
from paramiko.channel import Channel
import time
import os
import inspect
import datetime
from socket import *
import pandas as pd
df = pd.DataFrame(columns=["Ip","Status",'Remarks'])
root = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
#ips = ['10.23.0.30', '10.23.0.11','10.23.0.12','10.23.0.13']
ips = open(root+ "\\ahip.txt")
for ipp in ips:
    ip = ipp.strip()
    print(ip)
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(ip, port=22, timeout=5, username='op', password='C#Uj!AnX')
        channel: Channel = ssh.invoke_shell()
        #print(type(channel))
        channel_data = str()
        while True:
            #channel.recv_ready():
            #time.sleep(1)
            channel_data += str(channel.recv(999))
            channel.send("su -\n")
            time.sleep(1)
            #channel_data += str(channel.recv(999))
            # if "Password" in channel_data:
            channel.send("HeWGEUx\n")
            time.sleep(3)
            #channel_data += str(channel.recv(999))
            channel.send("/bs/lteCli\n")
            time.sleep(3)
            channel.send("logger threshold set cli=6\n")
            time.sleep(4)
            channel.send("db set stackCfg [1] EnCLPC=0\n")
            time.sleep(4)
            channel.send("db get stackCfg EnCLPC\n")
            time.sleep(3)
            #channel.send("db get stackCfg EnCLPC\n")
            time.sleep(.1)
            channel_data += str(channel.recv(99999))
            str2 = 'Done'
            df.loc[ip] = [ip, str2, channel_data]
            #print(channel_data)
            channel.close()
            ssh.close()
            break
    except (timeout, AuthenticationException):
        print(ip + 'not done')
        str1 = 'Not Done'
        df.loc[ip] = [ip, 'failed', str1]
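A small variation on the receive logic (a sketch, not part of the original answer) is to poll recv_ready() so recv() never blocks while the device is still producing output:
import time

def drain(channel, wait=1.0):
    """Collect whatever output is currently available on the channel."""
    time.sleep(wait)
    data = b""
    while channel.recv_ready():
        data += channel.recv(4096)
    return data.decode("utf-8", "replace")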

Run script with arguments via ssh with at command from python script

I have a python program which needs to call a script on a remote system via ssh.
This ssh call needs to happen (once) at a specified date which can be done via the linux at command.
I am able to call both of these external bash commands using either the os module or the subprocess module from my python program. The issue comes when passing certain arguments to the remote script.
In addition to being run remotely and at a later date, the (bash) script I wish to call requires several arguments to be passed to it, these arguments are python variables which I wish to pass on to the script.
user="user#remote"
arg1="argument with spaces"
arg2="two"
cmd="ssh "+user+"' /home/user/path/script.sh "+arg1+" "+arg2+"'"
os.system(cmd)
One of these arguments is a string which contains spaces but would ideally be passed as a single argument;
for example:
./script.sh "Argument with Spaces"
where $1 is equal to "Argument with Spaces"
I have tried various combinations of escaping double and single quotes in both python and the string itself and the use of grave accents around the entire ssh command. The most successful version calls the script with the arguments as desired, but ignores the at command and runs immediately.
Is there a clean way within python to accomplish this?
new answer
Now that you edited your question, you should probably be using format strings:
cmd = '''ssh {user} "{cmd} '{arg0}' '{arg1}'"'''.format(user="user@remote", cmd="somescript", arg0="hello", arg1="hello world")
print cmd
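To also cover the at part of the question, here is a hedged sketch (not from the original answer) that quotes the arguments with pipes.quote and pipes the ssh invocation into at; the time and paths are made up:
import pipes
import subprocess

user = "user@remote"
arg1 = "argument with spaces"
arg2 = "two"

# Quote each argument once for the remote shell, then quote the whole
# remote command once more so ssh delivers it intact.
remote_cmd = "/home/user/path/script.sh %s %s" % (pipes.quote(arg1), pipes.quote(arg2))
ssh_cmd = "ssh %s %s" % (user, pipes.quote(remote_cmd))

# at reads the job from stdin and runs it at the given time instead of now.
at = subprocess.Popen(["at", "16:30"], stdin=subprocess.PIPE)
at.communicate(ssh_cmd + "\n")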
old answer
I think you can use a -c switch with ssh to execute some code on a remote machine (ssh user@host.net -c "python myscript.py arg1 arg2")
Alternatively, since I needed more than that, I use this paramiko wrapper class (you will need to install paramiko):
from contextlib import contextmanager
import os
import re
import paramiko
import time

class SshClient:
    """A wrapper of paramiko.SSHClient"""
    TIMEOUT = 10

    def __init__(self, connection_string, **kwargs):
        self.key = kwargs.pop("key", None)
        self.client = kwargs.pop("client", None)
        self.connection_string = connection_string
        try:
            self.username, self.password, self.host = re.search("(\w+):(\w+)@(.*)", connection_string).groups()
        except (TypeError, ValueError):
            raise Exception("Invalid connection string, should be 'user:pass@ip'")
        try:
            self.host, self.port = self.host.split(":", 1)
        except (TypeError, ValueError):
            self.port = "22"
        self.connect(self.host, int(self.port), self.username, self.password, self.key)

    def reconnect(self):
        self.connect(self.host, int(self.port), self.username, self.password, self.key)

    def connect(self, host, port, username, password, key=None):
        self.client = paramiko.SSHClient()
        self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.client.connect(host, port, username=username, password=password, pkey=key, timeout=self.TIMEOUT)

    def close(self):
        if self.client is not None:
            self.client.close()
            self.client = None

    def execute(self, command, sudo=False, **kwargs):
        should_close = False
        if not self.is_connected():
            self.reconnect()
            should_close = True
        feed_password = False
        if sudo and self.username != "root":
            command = "sudo -S -p '' %s" % command
            feed_password = self.password is not None and len(self.password) > 0
        stdin, stdout, stderr = self.client.exec_command(command, **kwargs)
        if feed_password:
            stdin.write(self.password + "\n")
            stdin.flush()
        result = {'out': stdout.readlines(),
                  'err': stderr.readlines(),
                  'retval': stdout.channel.recv_exit_status()}
        if should_close:
            self.close()
        return result

    @contextmanager
    def _get_sftp(self):
        yield paramiko.SFTPClient.from_transport(self.client.get_transport())

    def put_in_dir(self, src, dst):
        if not isinstance(src, (list, tuple)):
            src = [src]
        print self.execute('''python -c "import os;os.makedirs('%s')"''' % dst)
        with self._get_sftp() as sftp:
            for s in src:
                sftp.put(s, dst + os.path.basename(s))

    def get(self, src, dst):
        with self._get_sftp() as sftp:
            sftp.get(src, dst)

    def rm(self, *remote_paths):
        for p in remote_paths:
            self.execute("rm -rf {0}".format(p), sudo=True)

    def mkdir(self, dirname):
        print self.execute("mkdir {0}".format(dirname))

    def remote_open(self, remote_file_path, open_mode):
        with self._get_sftp() as sftp:
            return sftp.open(remote_file_path, open_mode)

    def is_connected(self):
        transport = self.client.get_transport() if self.client else None
        return transport and transport.is_active()
You can then use it as follows:
client = SshClient("username:password#host.net")
result = client.execute("python something.py cmd1 cmd2")
print result
result2 = client.execute("cp some_file /etc/some_file",sudo=True)
print result2

source command does not take effect

I have a cluster of machines, and I write a Python script to change the hostname; the code follows.
What puzzles me is that the source command only takes effect on some machines, not all. After I repeat it several times, all the hostnames take effect. [BTW: I can change the hostname in /etc/hostname, but the hostname service does not take effect.]
import paramiko
import time
import threading
import os
cmd0 = "sudo source /root/.bashrc"
cmd1 = "sudo service hostname stop"
cmd2 = "sudo service hostname start"
def executeit(ip):
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(ip, 22, "root")
    ssh.exec_command(cmd0)
    stdin, stdout, stderr = ssh.exec_command(cmd1)
    out = stdout.readlines()
    for o in out:
        print(o)
    time.sleep(1)
    ssh.exec_command(cmd2)
    ssh.close()
    print("=======")

def main():
    fr = open("info.txt", "r")
    contents = fr.read().splitlines()
    fr.close()
    for ip in contents:
        t = threading.Thread(target=executeit, args=(ip,))
        t.start()

main()

python subprocess communicate freezes

I have the following Python code that hangs:
cmd = ["ssh", "-tt", "-vvv"] + self.common_args
cmd += [self.host]
cmd += ["cat > %s" % (out_path)]
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(in_string)
It is supposed to save a string (in_string) into a remote file over ssh.
The file is correctly saved but then the process hangs. If I use
cmd += ["echo"] instead of
cmd += ["cat > %s" % (out_path)]
the process does not hang so I am pretty sure that I misunderstand something about the way communicate considers that the process has exited.
Do you know how I should write the command so that the "cat > file" does not make communicate hang?
The -tt option allocates a tty, which prevents the child process from exiting when .communicate() closes p.stdin (the EOF is ignored). This works:
import pipes
from subprocess import Popen, PIPE
cmd = ["ssh", self.host, "cat > " + pipes.quote(out_path)] # no '-tt'
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(in_string)
You could use paramiko, a pure-Python ssh library, to write data to a remote file via ssh:
#!/usr/bin/env python
import os
import posixpath
import sys
from contextlib import closing
from paramiko import SSHConfig, SSHClient
hostname, out_path, in_string = sys.argv[1:] # get from command-line
# load parameters to setup ssh connection
config = SSHConfig()
with open(os.path.expanduser('~/.ssh/config')) as config_file:
    config.parse(config_file)
d = config.lookup(hostname)
# connect
with closing(SSHClient()) as ssh:
    ssh.load_system_host_keys()
    ssh.connect(d['hostname'], username=d.get('user'))
    with closing(ssh.open_sftp()) as sftp:
        makedirs_exists_ok(sftp, posixpath.dirname(out_path))
        with sftp.open(out_path, 'wb') as remote_file:
            remote_file.write(in_string)
where the makedirs_exists_ok() function mimics os.makedirs():
from functools import partial
from stat import S_ISDIR
def isdir(ftp, path):
    try:
        return S_ISDIR(ftp.stat(path).st_mode)
    except EnvironmentError:
        return None

def makedirs_exists_ok(ftp, path):
    def exists_ok(mkdir, name):
        """Don't raise an error if name is already a directory."""
        try:
            mkdir(name)
        except EnvironmentError:
            if not isdir(ftp, name):
                raise

    # from os.makedirs()
    head, tail = posixpath.split(path)
    if not tail:
        assert path.endswith(posixpath.sep)
        head, tail = posixpath.split(head)
    if head and tail and not isdir(ftp, head):
        exists_ok(partial(makedirs_exists_ok, ftp), head)  # recursive call
        # do create directory
        assert isdir(ftp, head)
    exists_ok(ftp.mkdir, path)
It makes sense that the cat command hangs: it is waiting for an EOF. I tried sending an EOF in the string but couldn't get it to work. Upon researching this question, I found a great module for streamlining the use of SSH for command-line tasks like your cat example. It might not be exactly what you need for your use case, but it does do what your question asks.
Install fabric with
pip install fabric
Inside a file called fabfile.py put
from fabric.api import run
def write_file(in_string, path):
    run('echo {} > {}'.format(in_string, path))
And then run this from the command prompt with:
fab -H username@host write_file:in_string=test,path=/path/to/file
