link several Popen commands with pipes - python

I know how to run a command using cmd = subprocess.Popen and then subprocess.communicate.
Most of the time I use a string tokenized with shlex.split as the 'argv' argument for Popen.
Example with "ls -l":
import subprocess
import shlex
print subprocess.Popen(shlex.split(r'ls -l'), stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()[0]
However, pipes do not seem to work... For instance, the following example returns nothing:
import subprocess
import shlex
print subprocess.Popen(shlex.split(r'ls -l | sed "s/a/b/g"'), stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()[0]
Can you tell me what I am doing wrong please?
Thx

I think you want to instantiate two separate Popen objects here, one for 'ls' and the other for 'sed'. You'll want to pass the first Popen object's stdout attribute as the stdin argument to the 2nd Popen object.
Example:
p1 = subprocess.Popen('ls ...', stdout=subprocess.PIPE)
p2 = subprocess.Popen('sed ...', stdin=p1.stdout, stdout=subprocess.PIPE)
print p2.communicate()
You can keep chaining this way if you have more commands:
p3 = subprocess.Popen('prog', stdin=p2.stdout, ...)
See the subprocess documentation for more info on how to work with subprocesses.
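For the ls -l | sed "s/a/b/g" pipeline from the question, that would look roughly like the sketch below (error handling omitted; closing p1.stdout in the parent lets ls receive SIGPIPE if sed exits early):
import subprocess
import shlex

p1 = subprocess.Popen(shlex.split('ls -l'), stdout=subprocess.PIPE)
p2 = subprocess.Popen(shlex.split('sed "s/a/b/g"'), stdin=p1.stdout, stdout=subprocess.PIPE)
p1.stdout.close()  # allow p1 to get SIGPIPE if p2 exits early
output = p2.communicate()[0]  # read from the end of the pipeline
print output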

I've made a little function to help with the piping, hope it helps. It will chain Popens as needed.
from subprocess import Popen, PIPE
import shlex

def run(cmd):
    """Runs the given command locally and returns the output, err and exit_code."""
    if "|" in cmd:
        cmd_parts = cmd.split('|')
    else:
        cmd_parts = []
        cmd_parts.append(cmd)
    i = 0
    p = {}
    for cmd_part in cmd_parts:
        cmd_part = cmd_part.strip()
        if i == 0:
            p[i] = Popen(shlex.split(cmd_part), stdin=None, stdout=PIPE, stderr=PIPE)
        else:
            p[i] = Popen(shlex.split(cmd_part), stdin=p[i - 1].stdout, stdout=PIPE, stderr=PIPE)
        i = i + 1
    (output, err) = p[i - 1].communicate()
    exit_code = p[0].wait()
    return str(output), str(err), exit_code

output, err, exit_code = run("ls -lha /var/log | grep syslog | grep gz")
if exit_code != 0:
    print "Output:"
    print output
    print "Error:"
    print err
    # Handle error here
else:
    # Be happy :D
    print output

shlex only splits up spaces according to the shell rules, but does not deal with pipes.
It should, however, work this way:
import subprocess
import shlex
sp_ls = subprocess.Popen(shlex.split(r'ls -l'), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
sp_sed = subprocess.Popen(shlex.split(r'sed "s/a/b/g"'), stdin=sp_ls.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
sp_ls.stdin.close() # makes it similar to /dev/null
output = sp_sed.communicate()[0] # read from the end of the pipeline; taking [0] ignores any errors
print output
According to help(subprocess):
Replacing shell pipe line
-------------------------
output=`dmesg | grep hda`
==>
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
output = p2.communicate()[0]
HTH

"""
Why don't you use shell
"""
from subprocess import Popen, PIPE

def output_shell(line):
    try:
        shell_command = Popen(line, stdout=PIPE, stderr=PIPE, shell=True)
    except OSError:
        return None
    except ValueError:
        return None
    (output, err) = shell_command.communicate()
    shell_command.wait()
    if shell_command.returncode != 0:
        print "Shell command failed to execute"
        return None
    return str(output)
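With shell=True the whole pipeline can be handed over as one string, so a call would look something like this (a small usage sketch for the function above):
result = output_shell('ls -l | sed "s/a/b/g"')
if result is not None:
    print result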

Thanks to #hernvnc, #glglgl, and #Jacques Gaudin for the answers. I fixed the code from #hernvnc; his version will hang in some scenarios.
import shlex
from subprocess import PIPE
from subprocess import Popen

def run(cmd, input=None):
    """Runs the given command locally and returns the output, err and exit_code."""
    if "|" in cmd:
        cmd_parts = cmd.split('|')
    else:
        cmd_parts = []
        cmd_parts.append(cmd)
    i = 0
    p = {}
    for cmd_part in cmd_parts:
        cmd_part = cmd_part.strip()
        if i == 0:
            if input:
                p[i] = Popen(shlex.split(cmd_part), stdin=PIPE, stdout=PIPE, stderr=PIPE)
            else:
                p[i] = Popen(shlex.split(cmd_part), stdin=None, stdout=PIPE, stderr=PIPE)
        else:
            p[i] = Popen(shlex.split(cmd_part), stdin=p[i - 1].stdout, stdout=PIPE, stderr=PIPE)
        i = i + 1
    # close the stdin explicitly, otherwise the following case will hang
    if input:
        p[0].stdin.write(input)
        p[0].stdin.close()
    (output, err) = p[i - 1].communicate()
    exit_code = p[0].wait()
    return str(output), str(err), exit_code
# test case below
inp = b'[ CMServer State ]\n\nnode node_ip instance state\n--------------------------------------------\n1 linux172 10.90.56.172 1 Primary\n2 linux173 10.90.56.173 2 Standby\n3 linux174 10.90.56.174 3 Standby\n\n[ ETCD State ]\n\nnode node_ip instance state\n--------------------------------------------------\n1 linux172 10.90.56.172 7001 StateFollower\n2 linux173 10.90.56.173 7002 StateLeader\n3 linux174 10.90.56.174 7003 StateFollower\n\n[ Cluster State ]\n\ncluster_state : Normal\nredistributing : No\nbalanced : No\ncurrent_az : AZ_ALL\n\n[ Datanode State ]\n\nnode node_ip instance state | node node_ip instance state | node node_ip instance state\n------------------------------------------------------------------------------------------------------------------------------------------------------------------------\n1 linux172 10.90.56.172 6001 P Standby Normal | 2 linux173 10.90.56.173 6002 S Primary Normal | 3 linux174 10.90.56.174 6003 S Standby Normal'
cmd = "grep -E 'Primary' | tail -1 | awk '{print $3}'"
run(cmd, input=inp)

Related

Python subprocess Popen get command output when using os.waitpid()

How to extract the stdout of the executed command when I use waitpid() during subprocess.Popen()?
p = subprocess.Popen(['ls'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
retcode = os.waitpid(p.pid, 0)[1]
When I then call
p.communicate()
it returns empty output ('', '').
ok, I use it like this now:
p = subprocess.Popen(['ls'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
err = p.stderr.read()
retcode = os.waitpid(p.pid, 0)[1]
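A minimal sketch of what usually works better here, assuming the goal is just to capture the output and the exit status: let communicate() do both the reading and the waiting, and take the return code from the Popen object instead of calling os.waitpid().
import subprocess

p = subprocess.Popen(['ls'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()   # reads both streams and waits for the process to finish
retcode = p.returncode       # set by communicate(), no os.waitpid() needed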

python subprocess handle exception

I am trying to handle exceptions and errors from Popen, but I can only handle them from send. How can I handle them from tar and pigz?
here is my code:
try:
    tar = Popen("tar cPf - %s" % pth, shell=True, stderr=PIPE, stdout=PIPE)
    pigz = Popen("pigz -1 -kc", stdin=tar.stdout, shell=True, stdout=PIPE, stderr=PIPE)
    send = Popen("./s3cmd -c ./.s3cfg sync - s3://%s/%s.tar.gz" % (bucket_name, filename), stdin=pigz.stdout, shell=True, stderr=PIPE, stdout=PIPE)
    send_err = send.communicate()[1]
    tar.stdout.close()
    pigz.stdout.close()
    tar.wait()
    pigz.wait()
    if send_err:
        print send_err
        return False
    else:
        return True
except:
    return False
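One way to see failures from the earlier stages is to check each process' exit code and read its stderr pipe as well, not just send's. A rough sketch, reusing the tar and pigz objects from the code above and assuming their stderr output is small enough not to fill the pipe buffer:
# run this after send.communicate() has returned;
# send's stderr was already captured as send_err above
for name, proc in (('tar', tar), ('pigz', pigz)):
    stage_err = proc.stderr.read()   # both were started with stderr=PIPE
    proc.wait()                      # safe to call again, returns the cached exit code
    if proc.returncode != 0:
        print "%s failed (exit code %d): %s" % (name, proc.returncode, stage_err)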

Python subprocess: how to use pipes thrice? [duplicate]

This question already has answers here:
How do I use subprocess.Popen to connect multiple processes by pipes?
(9 answers)
Closed 8 years ago.
I'd like to use subprocess on the following line:
convert ../loxie-orig.png bmp:- | mkbitmap -f 2 -s 2 -t 0.48 | potrace -t 5 --progress -s -o ../DSC00232.svg
Thanks to other posts I found the subprocess documentation, but the example there only pipes two commands together.
So I tried it with two of the three commands and it works:
p1 = subprocess.Popen(['convert', fileIn, 'bmp:-'], stdout=subprocess.PIPE)
# p2 = subprocess.Popen(['mkbitmap', '-f', '2', '-s', '2', '-t', '0.48'], stdout=subprocess.PIPE)
p3 = subprocess.Popen(['potrace', '-t' , '5', '-s' , '-o', fileOut], stdin=p1.stdout,stdout=subprocess.PIPE)
p1.stdout.close() # Allow p1 to receive a SIGPIPE if p3 exits.
output = p3.communicate()[0]
Can you help me for the third command?
Thank you very much.
Just add a third command following the same example:
p1 = subprocess.Popen(['convert', fileIn, 'bmp:-'], stdout=subprocess.PIPE)
p2 = subprocess.Popen(['mkbitmap', '-f', '2', '-s', '2', '-t', '0.48'],
                      stdin=p1.stdout, stdout=subprocess.PIPE)
p1.stdout.close()
p3 = subprocess.Popen(['potrace', '-t', '5', '-s', '-o', fileOut],
                      stdin=p2.stdout, stdout=subprocess.PIPE)
p2.stdout.close()
output = p3.communicate()[0]
import subprocess

def runPipe(cmds):
    try:
        p1 = subprocess.Popen(cmds[0].split(' '), stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        prev = p1
        for cmd in cmds[1:]:
            p = subprocess.Popen(cmd.split(' '), stdin=prev.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            prev = p
        stdout, stderr = p.communicate()
        p.wait()
        returncode = p.returncode
    except Exception, e:
        stderr = str(e)
        returncode = -1
    if returncode == 0:
        return (True, stdout.strip().split('\n'))
    else:
        return (False, stderr)
Then execute it like:
runPipe(['ls -1','head -n 2', 'head -n 1'])
Use subprocess.Popen() with the option shell=True, and you can pass it your entire command as a single string.
This is the simplest solution and makes it possible to embed a complicated pipeline in Python without head-scratching; but in some cases it might not work, e.g. (as #torek commented) if there are spaces in the filenames passed for input or output. In that case, take the trouble to build up the robust solution in the accepted answer.
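For this particular pipeline, that would look roughly like the sketch below (fileIn and fileOut are carried over from the snippets above; the whole string is handed to the shell):
import subprocess

cmd = 'convert %s bmp:- | mkbitmap -f 2 -s 2 -t 0.48 | potrace -t 5 -s -o %s' % (fileIn, fileOut)
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
output = p.communicate()[0]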

raw_input in python variable to cmd

How can I take this code, prepend "ping" to the raw_input value, and run it in cmd, so that the command prompt executes [ping (raw_input)]?
Here is the code:
RS_usr = raw_input('input user here: ')
from subprocess import Popen, PIPE, STDOUT
p = Popen(['cmd.exe'], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
out,err = p.communicate(input=RS_usr)
print(out)
If I understood you correctly you should change a couple of lines in your script:
p = Popen(['cmd.exe', '/k', 'set PROMPT=[Hello, (' + RS_usr + ')]' ], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
out,err = p.communicate()
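If the goal is simply to run ping against the entered name, it may be easier to skip cmd.exe entirely and call ping directly; a sketch under that assumption:
from subprocess import Popen, PIPE, STDOUT

RS_usr = raw_input('input user here: ')
p = Popen(['ping', RS_usr], stdout=PIPE, stderr=STDOUT)  # runs: ping <raw_input>
out, err = p.communicate()
print(out)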

How to get the last N lines of a subprocess' stderr stream output?

I am a Python newbie writing a Python (2.7) script that needs to exec a number of external applications, one of which writes a lot of output to its stderr stream. What I am trying to figure out is a concise way (in Python) to get the last N lines from that subprocess' stderr output stream.
Currently, I am running that external application from my Python script like so:
p = subprocess.Popen('/path/to/external-app.sh', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
    print "ERROR: External app did not complete successfully (error code is " + str(p.returncode) + ")"
    print "Error/failure details: ", stderr
    status = False
else:
    status = True
I'd like to capture the last N lines of output from its stderr stream so that they can be written to a log file or emailed, etc.
N = 3 # for 3 lines of output
p = subprocess.Popen(['/path/to/external-app.sh'],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
    print ("ERROR: External app did not complete successfully "
           "(error code is %s)" % p.returncode)
    print "Error/failure details: ", '\n'.join(stderr.splitlines()[-N:])
    status = False
else:
    status = True
If the whole output can't be stored in RAM then:
import sys
from collections import deque
from subprocess import Popen, PIPE
from threading import Thread

ON_POSIX = 'posix' in sys.builtin_module_names

def start_thread(func, *args):
    t = Thread(target=func, args=args)
    t.daemon = True
    t.start()
    return t

def consume(infile, output):
    for line in iter(infile.readline, ''):
        output(line)
    infile.close()

p = Popen(['cat', sys.argv[1]], stdout=PIPE, stderr=PIPE,
          bufsize=1, close_fds=ON_POSIX)

# preserve last N lines of stdout, print stderr immediately
N = 100
queue = deque(maxlen=N)
threads = [start_thread(consume, *args)
           for args in (p.stdout, queue.append), (p.stderr, sys.stdout.write)]
for t in threads:
    t.join() # wait for IO completion
print ''.join(queue), # print last N lines
retcode = p.wait()
