Python script to SSH to multiple switches and send output of commands

Fairly new to Python. I'm able to SSH to a switch and get the data I need using the script below, but I need to pull this information from 100+ switches. What do I need to add to this script to achieve that? Thanks
from paramiko import client

class ssh:
    client = None

    def __init__(self, address, username, password):
        # Let the user know we're connecting to the server
        print("Connecting to switch.")
        # Create a new SSH client
        self.client = client.SSHClient()
        # The following line is required if you want the script to be able to
        # access a server that's not yet in the known_hosts file
        self.client.set_missing_host_key_policy(client.AutoAddPolicy())
        # Make the connection
        self.client.connect(address, username=username, password=password, look_for_keys=False)

    def sendCommand(self, command):
        if self.client:
            stdin, stdout, stderr = self.client.exec_command(command)
            while not stdout.channel.exit_status_ready():
                # Print data when available
                if stdout.channel.recv_ready():
                    alldata = stdout.channel.recv(1024)
                    prevdata = b"1"
                    while prevdata:
                        prevdata = stdout.channel.recv(1024)
                        alldata += prevdata
                    print(str(alldata, "utf8"))
        else:
            print("Connection not opened.")

connection = ssh("x.x.x.x", "user", "pwd")
connection.sendCommand("show int status | e not")

Do you mean switches as in instances? So you want to run a command on multiple machines? You can try Fabric:
#roles("instance-group")
#task
def upload():
"""上传代码到测试服务器"""
local("tar -zc -f /tmp/btbu-spider.tar.gz ../BTBU-Spider")
put("/tmp/btbu-spider.tar.gz", "/tmp/")
local('rm /tmp/btbu-spider.tar.gz')
run("tar -zx -f /tmp/btbu-spider.tar.gz -C ~/test/")
Then you can define the SSH hosts/IPs for "instance-group" and call this task from your local machine; note that the run() commands actually execute on the remote instances, while local() runs locally. A minimal sketch of the role definition is shown below.
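For reference, here is a minimal sketch of how "instance-group" could be defined and the task invoked, assuming Fabric 1.x (the host addresses and the task body are placeholders):

# fabfile.py -- sketch assuming Fabric 1.x; host addresses are placeholders
from fabric.api import env, roles, run, task

env.roledefs = {
    'instance-group': ['user@10.0.0.1', 'user@10.0.0.2'],
}

@roles('instance-group')
@task
def show_uptime():
    # run() executes on each remote host in the role
    run('uptime')

# invoke from the shell with:  fab show_uptime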

Add the information about all the switches to a configuration file, for example:
address1,username1,password1
address2,username2,password2
Each switch goes on its own line and the arguments are separated by commas for ease of use. Then open the file, read it line by line, and parse each line:
with open('switches.conf', 'r') as f:
    for line in f:  # Read line by line
        params = line.strip().split(',')  # Split the three arguments by comma
        address = params[0]
        username = params[1]
        password = params[2]
        connection = ssh(address, username, password)
        connection.sendCommand("show int status | e not")
        with open(address, 'w+') as wf:  # If you need to write out information about the switch, this approach is suggested
            wf.write(..information to write..)
That must be done in a loop. You can call two functions in that loop, one that initializes the SSH connection and one that gets the information, and that loop could be your main function. Of course the configuration example is just a guideline; it has a lot of flaws, especially security ones (plaintext passwords), so adapt it however you want. The idea is simply that you need a loop, and having a configuration file to read from makes it a lot easier. You can also save the information to separate text files named after the addresses of the switches, as @Santosh Kumar suggested in the comments; see the sketch below.
EDIT: Edited my answer and added an example for the connection and sending the command, as I hadn't noticed it was a class.
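As a concrete illustration, here is a minimal sketch of such a loop that writes one output file per switch. It assumes a hypothetical getCommandOutput(command) method on the ssh class that returns the command output as a string instead of printing it (the original class only prints):

# Sketch only: getCommandOutput() is a hypothetical variant of sendCommand()
# that returns the output as a string instead of printing it.
def collect_from_switches(conf_path='switches.conf'):
    with open(conf_path, 'r') as f:
        for line in f:
            line = line.strip()
            if not line:
                continue  # skip blank lines
            address, username, password = line.split(',')
            connection = ssh(address, username, password)
            output = connection.getCommandOutput("show int status | e not")
            # One output file per switch, named after its address
            with open(address + '.txt', 'w') as wf:
                wf.write(output)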

You can use parallel-ssh to run SSH commands on multiple machines. Install it with `pip install parallel-ssh`.
from pssh.clients import ParallelSSHClient

hosts = ['switch1', 'switch2']
client = ParallelSSHClient(hosts, user='my_user', password='my_pass')
output = client.run_command('show int status | e not')
for host, host_output in output.items():
    print(host, "\n".join(host_output.stdout))
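If you also need one output file per switch, here is a small follow-up sketch in the same style (it assumes the dict-like output iterated above; note that parallel-ssh's return type differs between versions):

# Sketch: write each host's output to its own file.
for host, host_output in output.items():
    with open(host + '.txt', 'w') as wf:
        wf.write("\n".join(host_output.stdout))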

Related

Using paramiko to ssh into BeagleBone Black, running python program does not show output [duplicate]

I am having issues passing responses to a bash script on a remote server over SSH.
I am writing a program in Python 3.6.5 that will SSH to a remote Linux server.
On this remote Linux server there is a bash script that I am running which requires user input to fill in. For whatever reason I cannot pass a user input from my original python program over SSH and have it fill in the bash script user input questions.
main.py
from tkinter import *
import SSH

hostname = 'xxx'
username = 'xxx'
password = 'xxx'

class Connect:
    def module(self):
        name = input()
        connection = SSH.SSH(hostname, username, password)
        connection.sendCommand(
            'cd xx/{}/xxxxx/ && source .cshrc && ./xxx/xxxx/xxxx/xxxxx'.format(path))
SSH.py
from paramiko import client

class SSH:
    client = None

    def __init__(self, address, username, password):
        print("Login info sent.")
        print("Connecting to server.")
        self.client = client.SSHClient()  # Create a new SSH client
        self.client.set_missing_host_key_policy(client.AutoAddPolicy())
        self.client.connect(
            address, username=username, password=password, look_for_keys=False)  # connect

    def sendCommand(self, command):
        print("Sending your command")
        # Check if connection is made previously
        if self.client:
            stdin, stdout, stderr = self.client.exec_command(command)
            while not stdout.channel.exit_status_ready():
                # Print stdout data when available
                if stdout.channel.recv_ready():
                    # Retrieve the first 1024 bytes
                    alldata = stdout.channel.recv(1024)
                    while stdout.channel.recv_ready():
                        # Retrieve the next 1024 bytes
                        alldata += stdout.channel.recv(1024)
                    # Print as string with utf8 encoding
                    print(str(alldata, "utf8"))
        else:
            print("Connection not opened.")
The final /xxxxxx in class Connect is the remote script that is launched.
It will open a text response awaiting a format such as
What is your name:
and I cannot seem to find a way to properly pass the response to the script from my main.py file within the class Connect.
Every way I have tried to pass name as an argument or a variable, the answer seems to just disappear (likely because it is being printed at the Linux prompt and not within the bash script).
I think using the read_until function to look for the : at the end of the question may work.
Suggestions?
Write the input that your command needs to the stdin:
stdin, stdout, stderr = self.client.exec_command(command)
stdin.write(name + '\n')
stdin.flush()
(You will of course need to propagate the name variable from module to sendCommand, but I assume you know how to do that part).
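For illustration, here is a minimal sketch of that propagation: a simplified sendCommand variant with an extra response parameter (not part of the original class) that feeds the answer to the remote prompt and then reads whatever output remains:

def sendCommand(self, command, response=None):
    # Simplified sketch; 'response' is an added, hypothetical parameter.
    if self.client:
        stdin, stdout, stderr = self.client.exec_command(command)
        if response is not None:
            stdin.write(response + '\n')  # answer the "What is your name:" prompt
            stdin.flush()
        print(stdout.read().decode('utf8'))  # read the remaining output in one go
    else:
        print("Connection not opened.")

# In Connect.module():
#   connection.sendCommand('...remote script...', response=name)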

How to keep an ssh connection open and do multiple requests and get their outputs within python

Because this question seems to aim somewhere else I am going to point my problem here:
In my python script I am using multiple requests to a remote server using ssh:
import subprocess

def ssh(command):
    command = 'ssh SERVER "{}"'.format(command)
    output = subprocess.check_output(
        command,
        stderr=subprocess.STDOUT,
        shell=True,
        universal_newlines=True
    )
    return output
Calling ssh('cat file1') gives me the content of file1 as output.
I now have multiple methods that use this function:
def show_one():
    return ssh('cat file1')

def show_two():
    return ssh('cat file2')

def run():
    one = show_one()
    print(one)
    two = show_two()
    print(two)
Executing run() will open and close the ssh connection for each show_* method, which makes it pretty slow.
Solutions:

1. I can put

       Host SERVER
           ControlMaster auto
           ControlPersist yes
           ControlPath ~/.ssh/socket-%r@%h:%p

   into my .ssh/config, but I would like to solve this within python.

2. There is the ssh flag -T to keep a connection open, and in the question mentioned before one answer was to use this with Popen() and p.communicate(), but it is not possible to get the output between the communicate() calls because it throws ValueError: Cannot send input after starting communication.

3. I could change my functions to execute a single ssh command like echo "--show1--"; cat file1; echo "--show2--"; cat file2, but this looks hacky to me, and I hope there is a better way to simply keep the ssh connection open and use it like normal.

What I would like to have: a pythonic way to do the same as I can configure in .ssh/config (see 1.), that is, declare a specific socket for the connection and explicitly open it, use it, and close it; see the sketch below.
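For what it's worth, one way to get that explicit open/use/close behavior from Python is to drive OpenSSH's ControlMaster options through subprocess; a minimal sketch, assuming OpenSSH is installed and SERVER is a reachable host alias (the control socket path is a placeholder):

import subprocess

HOST = 'SERVER'                    # placeholder host alias
SOCKET = '/tmp/ssh-mux-%r@%h:%p'   # placeholder control socket path

def open_connection():
    # -M: master mode, -S: control socket, -f: background, -N: no remote command
    subprocess.check_call(['ssh', '-M', '-S', SOCKET, '-fN', HOST])

def ssh(command):
    # Reuses the already-open master connection via the control socket
    return subprocess.check_output(['ssh', '-S', SOCKET, HOST, command],
                                   universal_newlines=True)

def close_connection():
    # -O exit: ask the master connection to shut down
    subprocess.check_call(['ssh', '-S', SOCKET, '-O', 'exit', HOST])

open_connection()
print(ssh('cat file1'))
print(ssh('cat file2'))
close_connection()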
Try creating an ssh object from a class and passing it to your functions:
import logging

import paramiko
from pythonping import ping
from scp import SCPClient

log = logging.getLogger(__name__)

class SSH():
    def __init__(self, ip='192.168.1.1', username='user', password='pass', connect=True, Timeout=10):
        self.ip = ip
        self.username = username
        self.password = password
        self.Timeout = Timeout
        self.ssh = paramiko.SSHClient()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        if connect:
            self.OpenConnection()
            self.scp = SCPClient(self.ssh.get_transport())

    def OpenConnection(self):
        try:
            skip_ping = False
            ping_res = False
            log.info('Sending ping to host (timeout=3, count=3): ' + self.ip)
            try:
                PingRes = ping(target=self.ip, timeout=3, count=3, verbose=True)
                log.info('Ping to host result: ' + str(PingRes.success()))
                ping_res = PingRes.success()
            except:
                skip_ping = True
            if ping_res or skip_ping:
                log.info('Starting to open connection....')
                self.ssh.connect(hostname=self.ip, username=self.username, password=self.password,
                                 timeout=self.Timeout, auth_timeout=self.Timeout, banner_timeout=self.Timeout)
                self.scp = SCPClient(self.ssh.get_transport())
                log.info('Connection open')
                return True
            else:
                log.error('ssh OpenConnection failed: No Ping to host')
                return False
        except Exception as e:
            # Catch any unexpected connection error
            log.error('ssh OpenConnection failed: ' + str(e))
            return False

myssh = SSH(ip='192.168.1.1', password='mypass', username='myusername')
The ping is wrapped in try/except because my machine sometimes returns an error; you can remove it and just verify a ping to the host.
The self.scp attribute is for file transfer.
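A short usage sketch of how the single open connection can then serve several commands (the file names are placeholders from the question; exec_command is standard paramiko):

# Sketch: reuse the one open paramiko connection for multiple commands.
def remote_cat(ssh_obj, path):
    stdin, stdout, stderr = ssh_obj.ssh.exec_command('cat ' + path)
    return stdout.read().decode()

myssh = SSH(ip='192.168.1.1', password='mypass', username='myusername')
print(remote_cat(myssh, 'file1'))
print(remote_cat(myssh, 'file2'))
myssh.ssh.close()  # close the connection when done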

How to use boto.manage.cmdshell with ssh-agent?

I'm using boto.manage.cmdshell to create an SSH connection to EC2 instances. Currently the user has to enter their password every time to decrypt the private key (e.g. ~/.ssh/id_rsa).
Now I want to make the work-flow more convenient for the users and support ssh-agent.
So far I have tried the following, without any success:
- setting ssh_key_file to None when creating FakeServer: the result was SSHException('Key object may not be empty')
- setting ssh_pwd to None when creating SSHClient: the result was paramiko.ssh_exception.PasswordRequiredException: Private key file is encrypted
Is there a way to use ssh-agent with boto.manage.cmdshell? I know that paramiko supports it, which boto is using.
(There's another Stack Overflow question with some related answers: "Can't get amazon cmd shell to work through boto".)
However, you're definitely better off using per-person SSH keys. But if you have those, are they in the target host's authorized_keys file? If so, users just add their key normally with ssh-add (in an ssh-agent session, usually the default in Linux). You need to test with ssh itself first, so that ssh-agent/ssh-add issues are clearly resolved beforehand.
Once you're certain they work with ssh normally, the question is whether boto supports ssh-agent at all. Paramiko's SSHClient() can, if I remember correctly; the paramiko code I remember looks roughly like:
paramiko.SSHClient().connect(host, timeout=10, username=user,
                             key_filename=seckey, compress=True)
The seckey was optional, so the key_filename would be empty, and that invoked checking the ssh-agent. Boto's version seems to want to force using a private key file with an explicit call like this, I think with the idea that each instance will have an assigned key and password to decrypt it:
self._pkey = paramiko.RSAKey.from_private_key_file(server.ssh_key_file,
                                                   password=ssh_pwd)
If so, it means that using boto directly conflicts with using ssh-agent and the standard model of per-user logins and logging of connections by user.
The paramiko.SSHClient() is much more capable, and documents ssh-agent support explicitly (from pydoc paramiko.SSHClient):
Authentication is attempted in the following order of priority:
- The C{pkey} or C{key_filename} passed in (if any)
- Any key we can find through an SSH agent
- Any "id_rsa" or "id_dsa" key discoverable in C{~/.ssh/}
- Plain username/password auth, if a password was given
Basically, you'd have to use paramiko instead of boto.
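For illustration, here is a minimal paramiko sketch that relies on ssh-agent (the host and user are placeholders; allow_agent=True is the default and is shown only for clarity):

import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# No pkey/key_filename given: paramiko tries keys held by ssh-agent,
# then ~/.ssh/id_rsa / id_dsa, before falling back to password auth.
client.connect('ec2-host.example.com', username='ec2-user', allow_agent=True)

stdin, stdout, stderr = client.exec_command('uname -a')
print(stdout.read().decode())
client.close()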
We had one issue with paramiko: the connection would not be ready immediately in many cases, requiring us to send a test command through and check its output before sending real commands. Part of this was that we'd start firing off SSH commands (with paramiko) right after creating an EC2 or VPC instance, so there was no guarantee it would be listening for an SSH connect, and paramiko would tend to lose commands delivered too soon. We used some code like this to cope:
import os
import re
import sys
import time

import paramiko
from paramiko.ssh_exception import SSHException

debug = False  # module-level verbosity flag used throughout

def SshCommand(**kwargs):
    '''
    Run a command on a remote host via SSH.

    Connect to the given host=<host-or-ip>, as user=<user> (defaulting to
    $USER), with optional seckey=<secret-key-file>, timeout=<seconds>
    (default 10), and execute a single command=<command> (assumed to be
    addressing a unix shell at the far end).

    Returns the exit status of the remote command (otherwise would be
    None, save that an exception should be raised instead).

    Example: SshCommand(host=host, user=user, command=command, timeout=timeout,
                        seckey=seckey)
    '''
    remote_exit_status = None
    if debug:
        sys.stderr.write('SshCommand kwargs: %r\n' % (kwargs,))
    paranoid = True
    host = kwargs['host']
    user = kwargs['user'] if kwargs['user'] else os.environ['USER']
    seckey = kwargs['seckey']
    timeout = kwargs['timeout']
    command = kwargs['command']

    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    time_end = time.time() + int(timeout)
    ssh_is_up = False
    while time.time() < time_end:
        try:
            ssh.connect(host, timeout=10, username=user, key_filename=seckey,
                        compress=True)
            if paranoid:
                token_generator = 'echo xyz | tr a-z A-Z'
                token_result = 'XYZ'  # possibly buried in other text
                stdin, stdout, stderr = ssh.exec_command(token_generator)
                lines = ''.join(stdout.readlines())
                if re.search(token_result, lines):
                    ssh_is_up = True
                    if debug:
                        sys.stderr.write("[%d] command stream is UP!\n"
                                         % time.time())
                    break
            else:
                ssh_is_up = True
                break
        except paramiko.PasswordRequiredException as e:
            sys.stderr.write("usage idiom clash: %r\n" % (e,))
            return False
        except Exception as e:
            sys.stderr.write("[%d] command stream not yet available\n"
                             % time.time())
            if debug:
                sys.stderr.write("exception is %r\n" % (e,))
            time.sleep(1)

    if ssh_is_up:
        # ideally this is where Bcfg2 or Chef or such ilk get called.
        # stdin, stdout, stderr = ssh.exec_command(command)
        chan = ssh._transport.open_session()
        chan.exec_command(command)
        # note that out/err doesn't have inter-stream ordering locked down.
        stdout = chan.makefile('rb', -1)
        stderr = chan.makefile_stderr('rb', -1)
        sys.stdout.write(''.join(stdout.readlines()))
        sys.stderr.write(''.join(stderr.readlines()))
        remote_exit_status = chan.recv_exit_status()
        if debug:
            sys.stderr.write('exit status was: %d\n' % remote_exit_status)

    ssh.close()
    if None == remote_exit_status:
        raise SSHException('remote command result undefined')
    return remote_exit_status
We were also trying to enforce not logging into prod directly, so this particular wrapper (an ssh-send-command script) encouraged scripting despite the vagaries of whether Amazon had bothered to start the instance in a timely fashion.
I found a solution to my problem by creating a class SSHClientAgent, which inherits from boto.manage.cmdshell.SSHClient and overrides __init__(). In the new __init__() I replaced the call to paramiko.RSAKey.from_private_key_file() with None.
Here is my new class:
import os

import boto.manage.cmdshell
import paramiko

class SSHClientAgent(boto.manage.cmdshell.SSHClient):
    def __init__(self, server,
                 host_key_file='~/.ssh/known_hosts',
                 uname='root', timeout=None, ssh_pwd=None):
        self.server = server
        self.host_key_file = host_key_file
        self.uname = uname
        self._timeout = timeout
        # replace the call to get the private key
        self._pkey = None
        self._ssh_client = paramiko.SSHClient()
        self._ssh_client.load_system_host_keys()
        self._ssh_client.load_host_keys(os.path.expanduser(host_key_file))
        self._ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.connect()
In the function where I create the SSH connection, I check for the environment variable SSH_AUTH_SOCK and decide which SSH client class to create; a sketch is shown below.
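A minimal sketch of that check (the factory function name is made up for illustration, and the stock boto client is assumed as the fallback):

import os

import boto.manage.cmdshell

def make_ssh_client(server, ssh_pwd=None):
    # Hypothetical factory: if an ssh-agent is available (SSH_AUTH_SOCK is set),
    # use the agent-friendly subclass; otherwise fall back to the stock boto
    # client, which decrypts a private key file with ssh_pwd.
    if os.environ.get('SSH_AUTH_SOCK'):
        return SSHClientAgent(server)
    return boto.manage.cmdshell.SSHClient(server, ssh_pwd=ssh_pwd)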
