Run script with arguments via ssh with at command from python script

I have a Python program which needs to call a script on a remote system via ssh.
This ssh call needs to happen (once) at a specified date, which can be done via the Linux at command.
I am able to call both of these external bash commands using either the os module or the subprocess module from my Python program. The issue comes when passing certain arguments to the remote script.
In addition to being run remotely and at a later date, the (bash) script I wish to call requires several arguments to be passed to it; these arguments are Python variables which I wish to pass on to the script.
user="user#remote"
arg1="argument with spaces"
arg2="two"
cmd="ssh "+user+"' /home/user/path/script.sh "+arg1+" "+arg2+"'"
os.system(cmd)
One of these arguments is a string which contains spaces but would ideally be passed as a single argument;
for example:
./script.sh "Argument with Spaces"
where $1 is equal to "Argument with Spaces"
I have tried various combinations of escaping double and single quotes in both Python and the string itself, as well as backticks around the entire ssh command. The most successful version calls the script with the arguments as desired, but ignores the at command and runs immediately.
Is there a clean way within python to accomplish this?

New answer
Now that you've edited your question, you should probably use format strings:
cmd = '''ssh {user} "{cmd} '{arg0}' '{arg1}'"'''.format(user="user@remote", cmd="somescript", arg0="hello", arg1="hello world")
print cmd
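If you also need the at scheduling from the original question, a sketch that quotes each shell layer explicitly is less fragile than hand-building the string. This uses pipes.quote (shlex.quote on Python 3); the time spec and paths are placeholders taken from the question:
import pipes  # use shlex.quote on Python 3
import subprocess

arg1 = "argument with spaces"
arg2 = "two"
# Quote the arguments for the shell that `at` will use to run the job...
remote_cmd = "/home/user/path/script.sh %s %s" % (pipes.quote(arg1), pipes.quote(arg2))
# ...then quote the whole job line once more for the login shell ssh invokes.
at_cmd = "echo %s | at 09:30 tomorrow" % pipes.quote(remote_cmd)
# Passing a list avoids a third (local) layer of shell quoting.
subprocess.call(["ssh", "user@remote", at_cmd])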
Old answer
You can pass the command to execute directly to ssh as its trailing argument (ssh user@host.net "python myscript.py arg1 arg2"); note that ssh's -c option selects a cipher, not a command.
Alternatively, since I needed more than that, I use this paramiko wrapper class (you will need to install paramiko):
from contextlib import contextmanager
import os
import re
import paramiko
import time

class SshClient:
    """A wrapper of paramiko.SSHClient"""
    TIMEOUT = 10

    def __init__(self, connection_string, **kwargs):
        self.key = kwargs.pop("key", None)
        self.client = kwargs.pop("client", None)
        self.connection_string = connection_string
        try:
            # re.search() returns None on no match, so .groups() raises AttributeError
            self.username, self.password, self.host = re.search(
                r"(\w+):(\w+)@(.*)", connection_string).groups()
        except AttributeError:
            raise Exception("Invalid connection string, should be 'user:pass@ip'")
        try:
            self.host, self.port = self.host.split(":", 1)
        except (TypeError, ValueError):
            self.port = "22"
        self.connect(self.host, int(self.port), self.username, self.password, self.key)

    def reconnect(self):
        self.connect(self.host, int(self.port), self.username, self.password, self.key)

    def connect(self, host, port, username, password, key=None):
        self.client = paramiko.SSHClient()
        self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.client.connect(host, port, username=username, password=password,
                            pkey=key, timeout=self.TIMEOUT)

    def close(self):
        if self.client is not None:
            self.client.close()
            self.client = None

    def execute(self, command, sudo=False, **kwargs):
        should_close = False
        if not self.is_connected():
            self.reconnect()
            should_close = True
        feed_password = False
        if sudo and self.username != "root":
            command = "sudo -S -p '' %s" % command
            feed_password = self.password is not None and len(self.password) > 0
        stdin, stdout, stderr = self.client.exec_command(command, **kwargs)
        if feed_password:
            stdin.write(self.password + "\n")
            stdin.flush()
        result = {'out': stdout.readlines(),
                  'err': stderr.readlines(),
                  'retval': stdout.channel.recv_exit_status()}
        if should_close:
            self.close()
        return result

    @contextmanager
    def _get_sftp(self):
        yield paramiko.SFTPClient.from_transport(self.client.get_transport())

    def put_in_dir(self, src, dst):
        if not isinstance(src, (list, tuple)):
            src = [src]
        print self.execute('''python -c "import os;os.makedirs('%s')"''' % dst)
        with self._get_sftp() as sftp:
            for s in src:
                sftp.put(s, dst + os.path.basename(s))

    def get(self, src, dst):
        with self._get_sftp() as sftp:
            sftp.get(src, dst)

    def rm(self, *remote_paths):
        for p in remote_paths:
            self.execute("rm -rf {0}".format(p), sudo=True)

    def mkdir(self, dirname):
        print self.execute("mkdir {0}".format(dirname))

    def remote_open(self, remote_file_path, open_mode):
        with self._get_sftp() as sftp:
            return sftp.open(remote_file_path, open_mode)

    def is_connected(self):
        transport = self.client.get_transport() if self.client else None
        return transport and transport.is_active()
You can then use it as follows:
client = SshClient("username:password@host.net")
result = client.execute("python something.py cmd1 cmd2")
print result
result2 = client.execute("cp some_file /etc/some_file",sudo=True)
print result2
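To tie this back to the original question, scheduling the remote script via at through the wrapper might look like the following sketch (path, arguments, and time spec are placeholders):
client = SshClient("username:password@host.net")
# `at` reads the job from stdin; the inner single quotes keep the spaced
# argument intact when the job is later run by sh.
result = client.execute(
    "echo \"/home/user/path/script.sh 'argument with spaces' two\" | at 09:30 tomorrow")
print result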

Related

Python multiprocessing-like execution over ssh

Looking at the basic example from Python's multiprocessing docs page:
from multiprocessing import Pool

def f(x):
    return x*x

if __name__ == '__main__':
    with Pool(5) as p:
        print(p.map(f, [1, 2, 3]))
This will execute f in separate processes that are auto-started, but on the local machine.
I see that it has support for remote execution, but that requires the managers to be started manually and also looks to be networking-only (i.e. outside of SSH, with no support for e.g. stdin / stdout serialization or something of sorts).
Is there a way to call python functions (as opposed to executables, as can be done e.g. using paramiko.client.SSHClient.exec_command) on remote hosts via SSH automatically? By "automatically" I mean without needing to manually handle process starting / stopping and communication (serialization of input parameters and return value).
The code below is an example of how I would execute multiple remote commands concurrently using a multithreading pool. command1 shows how I would invoke a remote Python function. Note that the full path to the Python interpreter may be required (unless it is in your home directory), because your .bash_profile script will not have been executed and so the usual PATH environment variable is not set up.
The return value from invoking foo is "printed" out and thus will be the stdout output response returned by function connect_and_execute_command. This by definition will be a string. If the type returned by foo is a builtin type, such as an int or a dict containing builtin types for its values, then the string representation of the type can be converted back into its "real" type using function ast.literal_eval.
import paramiko
from multiprocessing.pool import ThreadPool
from ast import literal_eval

def execute_command(client, command):
    """
    Execute a command with client, which is already connected to some host.
    """
    stdin_, stdout_, stderr_ = client.exec_command(command)
    return [stdout_.read().decode(), stderr_.read().decode()]

def connect(hostname, username, password=None):
    client = paramiko.client.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.client.AutoAddPolicy())
    client.connect(hostname=hostname, username=username, password=password)
    return client

def connect_and_execute_command(command, hostname, username, password=None):
    with connect(hostname, username, password) as client:
        return execute_command(client, command)

command0, host0, user0, password0 = 'ls -l', 'some_host0', 'some_username0', 'some_password0'
command1, host1, user1, password1 = '''~/my_local_python/python -c "from temp import foo; print(foo(6), end='')"''', 'some_host1', 'some_username1', 'some_password1'
requests = ((command0, host0, user0, password0), (command1, host1, user1, password1))

with ThreadPool(len(requests)) as pool:
    results = pool.starmap(connect_and_execute_command, requests)
    # Convert stdout response from command1:
    results[1][0] = literal_eval(results[1][0])
    for stdout_response, _ in results:
        print(stdout_response, end='')
    print()
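For reference, the temp module that command1 imports is not shown in the original; a minimal remote-side temp.py might look like this (hypothetical, but note foo must return a builtin type for the literal_eval round trip to work):
# temp.py (on the remote host) -- hypothetical example module
def foo(x):
    # Return only builtin types so the printed repr can be parsed back
    # locally with ast.literal_eval.
    return {'input': x, 'square': x * x}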

Python subprocess (incorrectly?) merging stderr and stdout for ssh

I'm trying to run various Linux commands via Python's subprocess and ssh. I'd like to be able to run the command and check the stderr and stdout lengths to determine whether the command was successful; for example, if there's no error it was successful. The problem is that after the initial connection all the output is going to stdout.
I did try using paramiko but kept getting unreliable authentication behaviour. This approach seems more robust, if I could just get it to work.
import subprocess
import os
import pty
from select import select

class open_ssh_helper(object):
    def __init__(self, host="127.0.0.1", user="root"):
        self.host = host
        self.user = user
        self.cmd = ['ssh',
                    user + '@' + host,
                    "-o", "StrictHostKeyChecking=no",
                    "-o", "UserKnownHostsFile=/dev/null"]
        self.prompt = '~#'
        self.error = ''
        self.output = ''
        self.mtty_stdin, self.stty_stdin = pty.openpty()
        self.mtty_stdout, self.stty_stdout = pty.openpty()
        self.mtty_stderr, self.stty_stderr = pty.openpty()
        self.ssh = subprocess.Popen(self.cmd,
                                    shell=False,
                                    stdin=self.stty_stdin,
                                    stdout=self.stty_stdout,
                                    stderr=self.stty_stderr)
        self._read_stderr()
        self._read_stdout()

    def _reader(self, read_pty):
        char = ""
        buf = ""
        while True:
            rs, ws, es = select([read_pty], [], [], 0.5)
            if read_pty in rs:
                char = os.read(rs[0], 1)
                buf += char
            else:
                break
        return buf

    def _read_stderr(self):
        self.error = self._reader(self.mtty_stderr)

    def _read_stdout(self):
        self.output = self._reader(self.mtty_stdout)

    def execute(self, command):
        os.write(self.mtty_stdin, command)
        self._read_stderr()
        self._read_stdout()

if __name__ == '__main__':
    ss = open_ssh_helper('10.201.202.236', 'root')
    print "Error: \t\t: " + ss.error
    print "Output: \t: " + ss.output
    ss.execute("cat /not/a/file\n")
    print "Error: \t\t: " + ss.error
    print "Output: \t: " + ss.output
Which outputs something like:
Error: : Warning: Permanently added '10.201.202.236' (ECDSA) to the list of known hosts.
Debian GNU/Linux 8
BeagleBoard.org Debian Image xxxx-xx-xx
Support/FAQ: http://elinux.org/Beagleboard:BeagleBoneBlack_Debian
Output: : Last login: Thu Oct 5 07:32:06 2017 from 10.201.203.29
root@beaglebone:~#
Error: :
Output: : cat /not/a/file
cat: /not/a/file: No such file or directory
root@beaglebone:~#
My hope was that the line cat: /not/a/file: No such file or directory would be printed as the error line above it. However it seems that for some reason the stdout is printing both the output and the error.
ssh combines stdout and stderr of the remote command into a single stream, so they both come through on ssh's stdout.
If you want to separate the two streams, you will have to do it on the remote system by redirecting one or both of them to a remote file and then reading the files. Or, less reliably but possibly easier, you could redirect stderr through a filter that puts something like 'STDERR:' at the start of each line and split the streams again that way.
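As a minimal sketch of that tagging idea (assuming a POSIX shell on the remote side and reusing the question's host), stderr can be routed through sed while the real stdout bypasses the pipe via a spare file descriptor:
import subprocess

# Remote side: fd 3 carries the real stdout past the pipe, while stderr
# is prefixed line-by-line by sed and merged into the single ssh stream.
remote_cmd = "{ cat /not/a/file 2>&1 1>&3 | sed 's/^/STDERR:/'; } 3>&1"
merged = subprocess.check_output(["ssh", "root@10.201.202.236", remote_cmd])

# Split the merged stream back apart locally by the prefix.
stderr_lines = [l[len("STDERR:"):] for l in merged.splitlines() if l.startswith("STDERR:")]
stdout_lines = [l for l in merged.splitlines() if not l.startswith("STDERR:")]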

SSH connection not persistent

I have the following script which SSHes into a network server and executes some commands. For some reason the SSH connection opens, but by the time the commands are executed it closes (I think); as a result the commands fail with the error below. Can anyone provide info on how to make the SSH connection persistent?
#!/usr/bin/python
import os
import sys
import json
import fileinput
import pwd
from subprocess import Popen, PIPE, STDOUT
import re
import paramiko
import MySQLdb

resource = r'qca-cdit-01'
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(resource, username='username', password='passwordname')
#chan = ssh.get_transport().open_session()
chan = ssh.invoke_shell()
chan.get_pty()
commandstringlist = \
    ['/local/mnt/workspace/LA_host_builds/AU_LINUX_ANDROID_LA.BF64.1.2.1_RB2.05.01.01.081.031_msm8992',
     'cd frameworks/base',
     'git fetch ssh://cdit@review-android.company.com:29418/platform/frameworks/base refs/changes/21/1260821/2 && git cherry-pick FETCH_HEAD']
for cmd_val in commandstringlist:
    #chan.exec_command(cmd_val)
    chan.send(cmd_val)
    print(chan.recv(1024))
error:
Traceback (most recent call last):
  File "ssh_test.py", line 21, in <module>
    chan.get_pty()
  File "/usr/local/lib/python2.7/dist-packages/paramiko/channel.py", line 60, in _check
    return func(self, *args, **kwds)
  File "/usr/local/lib/python2.7/dist-packages/paramiko/channel.py", line 177, in get_pty
    self._wait_for_event()
  File "/usr/local/lib/python2.7/dist-packages/paramiko/channel.py", line 1086, in _wait_for_event
    raise e
paramiko.ssh_exception.SSHException: Channel closed
Every command you execute using exec_command has a channel of its own, and therefore a context of its own. That context includes the working directory: you change the working directory in one context and then try to use it in another. Instead, use the same channel for all the commands. You can either open a channel and reuse it, or just issue all the commands at once:
commandstringlist = ['cd /local/mnt/workspace/test2 && cd data/log && git fetch ssh://username@review-android.company.com:29418/platform/data/log refs/changes/21/1260821/2 && git cherry-pick FETCH_HEAD']
Here are a few other questions that should explain this in more detail.
https://unix.stackexchange.com/questions/80821/why-does-cd-command-not-work-via-ssh
https://superuser.com/questions/46851/keeping-working-directory-across-ssh
https://stackoverflow.com/a/6770272/492773
The trick is to change to the directory before executing the command. You can easily integrate it into your execute_command:
def execute_command(cmd, pwd=None):
    if pwd:
        cmd = 'cd "%s";%s' % (pwd, cmd)
    print cmd
    si, so, se = ssh.exec_command(cmd)
    print os.getcwd()  # note: this is the local working directory, not the remote one
    print "printing so"
    soreadList = so.readlines()
    print soreadList
    print "printing se"
    errList = se.readlines()
    print errList
The command is run by a shell on the remote machine, so any shell tricks like setting environment variables could be added also.
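For example, extending the same helper with an environment variable (the directory and PATH values are illustrative, since exec_command does not load the remote login shell's environment):
# Prefix the command with any environment the remote shell should see:
si, so, se = ssh.exec_command('cd "/some/dir"; PATH="$PATH:/usr/local/bin" git status')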
Simply do:
#!/usr/bin/python
import os
import sys
import json
import fileinput
import pwd
from subprocess import Popen, PIPE, STDOUT
import re
import paramiko
import MySQLdb

resource = r'qca-cdit-01'
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(resource, username='username', password='passwordname')
#chan = ssh.get_transport().open_session()
chan = ssh.invoke_shell()
commandstringlist = \
    ['\n/local/mnt/workspace/LA_host_builds/AU_LINUX_ANDROID_LA.BF64.1.2.1_RB2.05.01.01.081.031_msm8992\n',
     '\ncd frameworks/base\n',
     '\ngit fetch ssh://cdit@review-android.company.com:29418/platform/frameworks/base refs/changes/21/1260821/2 && git cherry-pick FETCH_HEAD\n']
for cmd_val in commandstringlist:
    #chan.exec_command(cmd_val)
    chan.send(cmd_val)
    print(chan.recv(8192))
which worked for me. (You forgot that .send() doesn't add newlines automatically.)
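One caveat with this loop: chan.recv() only returns whatever has already arrived, which can be empty or truncated if the remote shell hasn't finished responding yet. A hedged variant that polls paramiko's recv_ready() before reading:
import time

for cmd_val in commandstringlist:
    chan.send(cmd_val)
    time.sleep(1)  # crude: give the remote shell a moment to respond
    while chan.recv_ready():
        print(chan.recv(8192))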
Can anyone provide info how to make the SSH connection persistent?
The following is an example of how to make an SSH connection persistent by attempting to execute a command and falling back to reconnecting and retrying when an error occurs.
import paramiko

class sshConnection:
    def __init__(self, host, un, pw):
        self.host = host
        self.un = un
        self.pw = pw

    def connect(self):
        self.ssh = paramiko.SSHClient()
        self.ssh.load_system_host_keys()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.ssh.connect(self.host, username=self.un, password=self.pw)

    def cmd(self, cmd, tries=0):
        self.si, self.so, self.se = None, None, None
        try:
            self.si, self.so, self.se = self.ssh.exec_command(cmd)
        except Exception:
            if tries > 3:
                raise
            self.connect()
            return self.cmd(cmd, tries + 1)
        return self.si, self.so, self.se

conn = sshConnection("host", "username", "password")
si, out, err = conn.cmd("ls -al")
print "\n".join(map(lambda x: x.strip(), out.readlines()))

Enter an ssh password using the standard python library (not pexpect)

Related questions that are essentially asking the same thing, but have answers that don't work for me:
Make python enter password when running a csh script
How to interact with ssh using subprocess module
How to execute a process remotely using python
I want to ssh into a remote machine and run one command. For example:
ssh <user>@<ipv6-link-local-addr>%eth0 sudo service fooService status
The problem is that I'm trying to do this through a python script with only the standard libraries (no pexpect). I've been trying to get this to work using the subprocess module, but calling communicate always blocks when requesting a password, even though I supplied the password as an argument to communicate. For example:
proc = subprocess.Popen(
    [
        "ssh",
        "{testUser1}@{testHost1}%eth0".format(**locals()),
        "sudo service cassandra status"],
    shell=False,
    stdin=subprocess.PIPE)
a, b = proc.communicate(input=testPasswd1)
print "a:", a, "b:", b
print "return code: ", proc.returncode
I've tried a number of variants of the above as well (e.g., removing "input=", adding/removing subprocess.PIPE assignments to stdout and stderr). However, the result is always the same prompt:
ubuntu@<ipv6-link-local-addr>%eth0's password:
Am I missing something? Or is there another way to achieve this using the python standard libraries?
This answer is just an adaptation of this answer by Torxed, which I recommend you go upvote. It simply adds the ability to capture the output of the command you execute on the remote server. (The pty is needed because ssh reads the password from its controlling terminal rather than from stdin, which is why feeding the password through a plain subprocess pipe blocks.)
import pty
from os import waitpid, execv, read, write

class ssh():
    def __init__(self, host, execute='echo "done" > /root/testing.txt',
                 askpass=False, user='root', password=b'SuperSecurePassword'):
        self.exec_ = execute
        self.host = host
        self.user = user
        self.password = password
        self.askpass = askpass
        self.run()

    def run(self):
        command = [
            '/usr/bin/ssh',
            self.user + '@' + self.host,
            '-o', 'NumberOfPasswordPrompts=1',
            self.exec_,
        ]
        # PID = 0 for child, and the PID of the child for the parent
        pid, child_fd = pty.fork()
        if not pid:  # Child process
            # Replace child process with our SSH process
            execv(command[0], command)
        ## if we haven't set up pub-key authentication
        ## we can loop for a password prompt and "insert" the password.
        while self.askpass:
            try:
                output = read(child_fd, 1024).strip()
            except:
                break
            lower = output.lower()
            # Write the password
            if b'password:' in lower:
                write(child_fd, self.password + b'\n')
                break
            elif b'are you sure you want to continue connecting' in lower:
                # Adding key to known_hosts
                write(child_fd, b'yes\n')
            else:
                print('Error:', output)
        # See if there's more output to read after the password has been sent,
        # and capture it in a list.
        output = []
        while True:
            try:
                output.append(read(child_fd, 1024).strip())
            except:
                break
        waitpid(pid, 0)
        return b''.join(output).decode()

if __name__ == "__main__":
    s = ssh("some ip", execute="ls -R /etc", askpass=True)
    print(s.run())
Output:
/etc:
adduser.conf
adjtime
aliases
alternatives
apm
apt
bash.bashrc
bash_completion.d
<and so on>

python subprocess communicate freezes

I have the following Python code that hangs:
cmd = ["ssh", "-tt", "-vvv"] + self.common_args
cmd += [self.host]
cmd += ["cat > %s" % (out_path)]
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(in_string)
It is supposed to save a string (in_string) into a remote file over ssh.
The file is correctly saved but then the process hangs. If I use
cmd += ["echo"] instead of
cmd += ["cat > %s" % (out_path)]
the process does not hang, so I am pretty sure that I misunderstand something about the way communicate decides that the process has exited.
Do you know how I should write the command so that the "cat > file" does not make communicate hang?
The -tt option allocates a tty, which prevents the child process from exiting when .communicate() closes p.stdin (the EOF is ignored). This works:
import pipes
from subprocess import Popen, PIPE
cmd = ["ssh", self.host, "cat > " + pipes.quote(out_path)] # no '-tt'
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(in_string)
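After communicate() returns, the exit status says whether the remote cat succeeded, which is worth checking in a sketch like this:
# p.returncode is ssh's exit status, which mirrors the remote command's.
if p.returncode != 0:
    raise RuntimeError("remote write failed: %s" % stderr)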
You could use paramiko, a pure Python ssh library, to write data to a remote file via ssh:
#!/usr/bin/env python
import os
import posixpath
import sys
from contextlib import closing
from paramiko import SSHConfig, SSHClient

hostname, out_path, in_string = sys.argv[1:]  # get from command-line

# load parameters to set up the ssh connection
config = SSHConfig()
with open(os.path.expanduser('~/.ssh/config')) as config_file:
    config.parse(config_file)
d = config.lookup(hostname)

# connect
with closing(SSHClient()) as ssh:
    ssh.load_system_host_keys()
    ssh.connect(d['hostname'], username=d.get('user'))
    with closing(ssh.open_sftp()) as sftp:
        makedirs_exists_ok(sftp, posixpath.dirname(out_path))
        with sftp.open(out_path, 'wb') as remote_file:
            remote_file.write(in_string)
where the makedirs_exists_ok() function mimics os.makedirs():
from functools import partial
from stat import S_ISDIR

def isdir(ftp, path):
    try:
        return S_ISDIR(ftp.stat(path).st_mode)
    except EnvironmentError:
        return None

def makedirs_exists_ok(ftp, path):
    def exists_ok(mkdir, name):
        """Don't raise an error if name is already a directory."""
        try:
            mkdir(name)
        except EnvironmentError:
            if not isdir(ftp, name):
                raise

    # from os.makedirs()
    head, tail = posixpath.split(path)
    if not tail:
        assert path.endswith(posixpath.sep)
        head, tail = posixpath.split(head)
    if head and tail and not isdir(ftp, head):
        exists_ok(partial(makedirs_exists_ok, ftp), head)  # recursive call
    # do create directory
    assert isdir(ftp, head)
    exists_ok(ftp.mkdir, path)
It makes sense that the cat command hangs: it is waiting for an EOF. I tried sending an EOF in the string but couldn't get it to work. Upon researching this question, I found a great module for streamlining the use of SSH for command-line tasks like your cat example. It might not be exactly what you need for your use case, but it does do what your question asks.
Install fabric with
pip install fabric
Inside a file called fabfile.py put
from fabric.api import run

def write_file(in_string, path):
    run('echo {} > {}'.format(in_string, path))
And then run it from the command prompt:
fab -H username@host write_file:in_string=test,path=/path/to/file
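If you'd rather drive it from Python than from the fab CLI, Fabric 1.x also exposes execute; a sketch under the assumption of the same Fabric 1.x API used above, with placeholder host and paths:
from fabric.api import env, execute
from fabfile import write_file

# Hypothetical host string; fabric prompts for a password or uses keys.
env.hosts = ['username@host']
execute(write_file, 'test', '/path/to/file')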
