I am trying to execute two commands in parallel for 10 seconds using the following piece of code, but the whole process takes more than 10 seconds, as you can see in the output. Could you please help me understand the reason and find the best solution for this problem?
import datetime
import shlex
import signal
import subprocess

stime = datetime.datetime.now()
print stime

commands = ("sudo /usr/local/bin/snort -v -u snort -g snort -c /usr/local/snort/etc/snort.conf -i eth0 &", "sudo gedit test")
for p in commands:
    p = subprocess.Popen(shlex.split(p), stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)

class Alarm(Exception):
    pass

def alarm_handler(signum, frame):
    raise Alarm

signal.signal(signal.SIGALRM, alarm_handler)
signal.alarm(10)  # in seconds

try:
    stdoutdata, stderrdata = p.communicate()
    signal.alarm(0)  # reset the alarm
except Alarm:
    print 'Ooops, taking too long!!!!'

etime = datetime.datetime.now()
print etime
And the output:
2013-01-08 03:30:00.836412
Ooops, taking too long!!!!
2013-01-08 03:30:16.548519
I feel like a threading.Timer might be more appropriate:
from threading import Timer
from subprocess import Popen,PIPE
import shlex
import datetime
import sys
jobs = ['sleep 100','sleep 200']
timers = []
processes = []
print datetime.datetime.now()
for job in jobs:
    p = Popen(shlex.split(job), stdout=PIPE)
    t = Timer(10, lambda p=p: p.terminate())
    t.start()
    timers.append(t)
    processes.append(p)

for t in timers:
    t.join()

stdout, stderr = processes[0].communicate()
stdout, stderr = processes[1].communicate()
print datetime.datetime.now()
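One design note: t.join() on a Timer always blocks for the full 10 seconds, even when a job exits early. If that matters, a minimal variation (same setup as above) is to wait on the processes themselves and cancel each timer as soon as its process is done:

from threading import Timer
from subprocess import Popen, PIPE
import shlex

jobs = ['sleep 100', 'sleep 2']  # the second job finishes well before the timeout
procs = []
for job in jobs:
    p = Popen(shlex.split(job), stdout=PIPE)
    t = Timer(10, lambda p=p: p.terminate())
    t.start()
    procs.append((p, t))

for p, t in procs:
    p.wait()    # returns when the process exits or the timer terminates it
    t.cancel()  # a no-op if the timer already fired; otherwise stops it early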
import multiprocessing
import subprocess
import shlex
import time
commands = ("echo -n HI-FIRST ", "echo -n HI-SECOND ")
def parallel(cmd):
    p = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
    stdoutdata, stderrdata = p.communicate()
    print stdoutdata + "\t" + time.ctime()

for cmd in commands:
    p = multiprocessing.Process(target=parallel, args=(cmd,))
    p.start()
Output:
$ python stack.py
HI-FIRST Fri Jan 11 08:47:18 2013
HI-SECOND Fri Jan 11 08:47:18 2013
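Note that the parent script does not wait for the workers here; if it should block until both commands finish, collect the Process objects and join them:

procs = []
for cmd in commands:
    p = multiprocessing.Process(target=parallel, args=(cmd,))
    p.start()
    procs.append(p)

for p in procs:
    p.join()  # block until both workers have finished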
Related
I want Python to kill a child process if its output meets certain criteria.
For example, while_file.py below is an infinite loop: it prints 0 to 999 and then appears to hang.
i = 0
while 1:
    if i < 1000:
        print i
        i += 1
I want to check whether the output of the child process is 999, and kill it when it is.
import os
import signal
import subprocess
def run_cmd(cmd):
    pro = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                           shell=True, preexec_fn=os.setsid)
    while True:
        r = pro.stdout.read()
        print r
        if r == 999:
            os.killpg(os.getpgid(pro.pid), signal.SIGTERM)

if __name__ == '__main__':
    print run_cmd('python while_file.py')
But there seems to be no response... why? Is it blocked?
There are a couple of problems:
- the read function (pro.stdout.read) reads the whole stream, i.e. until EOF. In your case EOF never arrives because the child's stdout is never closed, so the call blocks forever. You should use readline.
- when you read, you get strings, not numbers, so you should compare with "999", not 999.
- I would also recommend making sure there is no buffering (data stuck in a buffer can get nasty later); see the note after the changed code below.
The changed code:
import sys
i = 0
while True:
    if i < 1000:
        print i
        sys.stdout.flush()
        i += 1
and
import os
import signal
import subprocess
def run_cmd(cmd):
    pro = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                           shell=True, preexec_fn=os.setsid)
    while True:
        r = pro.stdout.readline()
        r = r.strip()
        print(r)
        if r == "999":
            os.killpg(os.getpgid(pro.pid), signal.SIGTERM)
            print("Process killed")
            break

if __name__ == '__main__':
    print run_cmd('PYTHONUNBUFFERED=1 python while_file.py')
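Note the form PYTHONUNBUFFERED=1 python while_file.py with no semicolon: with a semicolon the variable is only set in the shell and never exported to the python command. As an alternative sketch, the shell can be skipped entirely and the variable passed through Popen's env parameter:

import os
import subprocess

env = dict(os.environ, PYTHONUNBUFFERED='1')  # copy the environment, force unbuffered child output
pro = subprocess.Popen(['python', 'while_file.py'],
                       stdout=subprocess.PIPE,
                       env=env, preexec_fn=os.setsid)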
I don't know if it is a good idea or not, but here is a way you can do it:
while_file.py
i = 0
while 1:
    if i < 1000:
        print(i)
        i += 1
stackoverflow.py
import os
import signal
import subprocess
def run_cmd(cmd):
    pro = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
    for line in iter(pro.stdout.readline, ''):
        r = line.rstrip()
        print(r)
        if r == b'999':
            os.killpg(os.getpgid(pro.pid), signal.SIGTERM)
            print("True !")

if __name__ == '__main__':
    print(run_cmd('python while_file.py'))
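This version relies on while_file.py flushing its own output in time; if the child script cannot be modified, the interpreter's -u flag is another way to force unbuffered output (reusing run_cmd from above):

print(run_cmd('python -u while_file.py'))  # -u disables output buffering in the child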
From a Python script, I need to call a PL->EN translation service. The translation requires 3 steps: tokenization, translation, detokenization.
On Linux, I can achieve this with 3 processes by executing the following commands in the order given:
/home/nlp/opt/moses/scripts/tokenizer/tokenizer.perl -l pl < path_to_input.txt > path_to_output.tok.txt
/home/nlp/opt/moses/bin/moses -f /home/nlp/Downloads/TED/tuning/moses.tuned.ini.1 -drop-unknown -input-file path_to_output.tok.txt -th 8 > path_to_output.trans.txt
/home/nlp/opt/moses/scripts/tokenizer/detokenizer.perl -l en < path_to_output.trans.txt > path_to_output.final.txt
which translates the file path_to_input.txt and outputs to path_to_output.final.txt
I have made the following script for combining the 3 processes:
import shlex
import subprocess
from subprocess import STDOUT,PIPE
import os
import socket
class Translator:
    @staticmethod
    def pl_to_en(input_file, output_file):
        # Tokenize
        print("Tokenization started")
        with open("tokenized.txt", "w+") as tokenizer_output:
            with open(input_file) as tokenizer_input:
                cmd = "/home/nlp/opt/moses/scripts/tokenizer/tokenizer.perl -l pl"
                args = shlex.split(cmd)
                p = subprocess.Popen(args, stdin=tokenizer_input, stdout=tokenizer_output)
                p.wait()
        print("Tokenization finished")

        # Translate
        print("Translation started")
        with open("translated.txt", "w+") as translator_output:
            cmd = "/home/nlp/opt/moses/bin/moses -f /home/nlp/Downloads/TED/tuning/moses.tuned.ini.1 -drop-unknown -input-file tokenized.txt -th 8"
            args = shlex.split(cmd)
            p = subprocess.Popen(args, stdout=translator_output)
            p.wait()
        print("Translation finished")

        # Detokenize
        print("Detokenization started")
        with open("translated.txt") as detokenizer_input:
            with open("detokenized.txt", "w+") as detokenizer_output:
                cmd = "/home/nlp/opt/moses/scripts/tokenizer/detokenizer.perl -l en"
                args = shlex.split(cmd)
                p = subprocess.Popen(args, stdin=detokenizer_input, stdout=detokenizer_output)
                p.wait()
        print("Detokenization finished")

translator = Translator()
translator.pl_to_en("some_input_file.txt", "some_output_file.txt")
But only the tokenization part works.
The translator just outputs an empty file, translated.txt. Watching the terminal, the translator appears to load tokenized.txt correctly and to produce a translation; the problem is just how I collect the output from that process.
I would try something like the following: send the output of the translator process to a pipe, and make the pipe the detokenizer's input, instead of going through files.
import shlex
import subprocess
from subprocess import STDOUT,PIPE
import os
import socket
class Translator:
    @staticmethod
    def pl_to_en(input_file, output_file):
        # Tokenize
        print("Tokenization started")
        with open("tokenized.txt", "w+") as tokenizer_output:
            with open(input_file) as tokenizer_input:
                cmd = "/home/nlp/opt/moses/scripts/tokenizer/tokenizer.perl -l pl"
                args = shlex.split(cmd)
                p = subprocess.Popen(args, stdin=tokenizer_input, stdout=tokenizer_output)
                p.wait()
        print("Tokenization finished")

        # Translate
        print("Translation started")
        cmd = "/home/nlp/opt/moses/bin/moses -f /home/nlp/Downloads/TED/tuning/moses.tuned.ini.1 -drop-unknown -input-file tokenized.txt -th 8"
        args = shlex.split(cmd)
        translate_p = subprocess.Popen(args, stdout=subprocess.PIPE)

        # Detokenize: start consuming the translator's stdout right away.
        # Calling translate_p.wait() before anything reads the pipe could
        # deadlock once the pipe buffer fills up.
        print("Detokenization started")
        with open("detokenized.txt", "w+") as detokenizer_output:
            cmd = "/home/nlp/opt/moses/scripts/tokenizer/detokenizer.perl -l en"
            args = shlex.split(cmd)
            detokenizer_p = subprocess.Popen(args, stdin=translate_p.stdout, stdout=detokenizer_output)
            translate_p.wait()
            print("Translation finished")
            detokenizer_p.wait()
        print("Detokenization finished")

translator = Translator()
translator.pl_to_en("some_input_file.txt", "some_output_file.txt")
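Taking the same idea a step further, all three stages could be chained with pipes so that no intermediate files are needed at all. A minimal sketch using the same paths and flags as above, and assuming moses reads from stdin when -input-file is omitted (worth verifying against your Moses build):

import shlex
import subprocess

def pl_to_en_pipeline(input_file, output_file):
    tok_cmd = shlex.split("/home/nlp/opt/moses/scripts/tokenizer/tokenizer.perl -l pl")
    mt_cmd = shlex.split("/home/nlp/opt/moses/bin/moses -f /home/nlp/Downloads/TED/tuning/moses.tuned.ini.1 -drop-unknown -th 8")
    detok_cmd = shlex.split("/home/nlp/opt/moses/scripts/tokenizer/detokenizer.perl -l en")

    with open(input_file) as src, open(output_file, "w") as dst:
        tok = subprocess.Popen(tok_cmd, stdin=src, stdout=subprocess.PIPE)
        mt = subprocess.Popen(mt_cmd, stdin=tok.stdout, stdout=subprocess.PIPE)
        detok = subprocess.Popen(detok_cmd, stdin=mt.stdout, stdout=dst)
        # drop the parent's copies of the pipe handles so EOF/SIGPIPE propagate correctly
        tok.stdout.close()
        mt.stdout.close()
        detok.wait()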
For example:
#!/usr/bin/env python3
# cmd.py
import time
for i in range(10):
    print("Count %d" % i)
    time.sleep(1)
#!/usr/bin/env python3
import subprocess
# useCmd.py
p = subprocess.Popen(['./cmd.py'], stdout=subprocess.PIPE)
out, err = p.communicate()
out = out.decode()
print(out)
In useCmd.py I can print the output of cmd.py, but only after it has finished. How can I print it in real time and still have it stored in a string? (Sort of like tee in bash.)
If you don't have to deal with stdin, you can avoid communicate, which blocks, and read directly from the process's stdout until it ends:
p = subprocess.Popen(['python', 'cmd.py'], stdout=subprocess.PIPE)
# out, err = p.communicate()
while True:
    line = p.stdout.readline()
    if line != '':
        print line,
    else:
        break
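To also keep the output in a string, as the question asks (the tee-like behaviour), one sketch is to accumulate the lines while echoing them:

import subprocess

p = subprocess.Popen(['python', 'cmd.py'], stdout=subprocess.PIPE)
chunks = []
for line in iter(p.stdout.readline, ''):  # use b'' as the sentinel on Python 3
    print line,            # echo in real time
    chunks.append(line)    # ... and remember it
p.wait()
out = ''.join(chunks)      # the complete output as one string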
Related
I'm struggling to emulate this simple piece of bash:
$ cat /tmp/fifo.tub &
[1] 24027
$ gunzip -c /tmp/filedat.dat.gz > /tmp/fifo.tub
line 01
line 02
line 03
line 04
line 05
line 06
line 07
line 08
line 09
line 10
[1]+ Done cat /tmp/fifo.tub
Basically I tried this subprocess approach:
# -*- coding: utf-8 -*-
import os
import sys
import shlex
import pprint
import subprocess
def main():
    fifo = '/tmp/fifo.tub'
    filedat = '/tmp/filedat.dat.gz '
    os.mkfifo(fifo, 0777)

    cat = "cat %s" % fifo
    args_cat = shlex.split(cat)
    pprint.pprint(args_cat)
    cat = subprocess.Popen(args_cat,
                           close_fds=True,
                           preexec_fn=os.setsid)
    print "PID cat: %s" % cat.pid

    f = os.open(fifo, os.O_WRONLY)

    gunzip = 'gunzip -c %s' % (filedat)
    args_gunzip = shlex.split(gunzip)
    pprint.pprint(args_gunzip)
    gunzip = subprocess.Popen(args_gunzip,
                              stdout=f,
                              close_fds=True,
                              preexec_fn=os.setsid)
    print "PID gunzip: %s" % gunzip.pid

    while not cat.poll():
        # hangs for ever
        pass
    return True

if __name__ == "__main__":
    main()
The cat process never ends.
Alternatively, I tried to bypass the problem with threads, but I get the same result.
import os
import sys
import shlex
import pprint
import subprocess
import threading
class Th(threading.Thread):
    def __init__(self, cmd, stdout_h=None):
        self.stdout = None
        self.stderr = None
        self.cmd = cmd
        self.stdout_h = stdout_h
        self.proceso = None
        self.pid = None
        threading.Thread.__init__(self)

    def run(self):
        if self.stdout_h:
            self.proceso = subprocess.Popen(self.cmd,
                                            shell=False,
                                            close_fds=True,
                                            stdout=self.stdout_h)
        else:
            self.proceso = subprocess.Popen(self.cmd,
                                            close_fds=True,
                                            shell=False)
        print "PID: %d" % self.proceso.pid

def main():
    fifo = '/tmp/fifo.tub'
    filedat = '/tmp/filedat.dat.gz '
    try:
        os.unlink(fifo)
    except:
        pass
    try:
        os.mkfifo(fifo, 0777)
    except Exception, err:
        print "Error '%s' tub %s." % (err, fifo)
        sys.exit(5)

    cat = "cat %s" % fifo
    args_cat = shlex.split(cat)
    pprint.pprint(args_cat)
    cat = Th(cmd=args_cat)
    cat.start()

    try:
        f = os.open(fifo, os.O_WRONLY)
    except Exception, err:
        print "Error '%s' when open fifo %s " % (err, fifo)
        sys.exit(5)

    gunzip = 'gunzip -c %s ' % (filedat)
    args_gunzip = shlex.split(gunzip)
    pprint.pprint(args_gunzip)
    gunzip = Th(cmd=args_gunzip, stdout_h=f)
    gunzip.start()
    gunzip.join()
    cat.join()

    while gunzip.proceso.poll() is None:
        pass
    if cat.proceso.poll() is None:
        print "Why?"
        cat.proceso.terminate()
    return True

if __name__ == "__main__":
    main()
I'm clearly missing something; any help will be really welcome.
You are not closing the FIFO file descriptor, so cat just hangs there thinking there is more to come.
I think you can use the .wait() method to do the same thing as your while loop.
# -*- coding: utf-8 -*-
import os
import sys
import shlex
import pprint
import subprocess
def main():
    fifo = '/tmp/fifo.tub'
    filedat = '/tmp/filedat.dat.gz '
    os.mkfifo(fifo, 0777)

    cat = "cat %s" % fifo
    args_cat = shlex.split(cat)
    pprint.pprint(args_cat)
    cat = subprocess.Popen(args_cat,
                           close_fds=True,
                           preexec_fn=os.setsid)
    print "PID cat: %s" % cat.pid

    f = os.open(fifo, os.O_WRONLY)

    gunzip = 'gunzip -c %s' % (filedat)
    args_gunzip = shlex.split(gunzip)
    pprint.pprint(args_gunzip)
    gunzip = subprocess.Popen(args_gunzip,
                              stdout=f,
                              close_fds=True,
                              preexec_fn=os.setsid)
    print "PID gunzip: %s" % gunzip.pid

    gunzip.wait()
    print "gunzip finished"
    os.close(f)
    cat.wait()
    print "cat finished"
    return True

if __name__ == "__main__":
    main()
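The key point is that a reader on a FIFO only sees EOF once every write end has been closed. A minimal self-contained sketch of that behaviour (hypothetical path /tmp/demo.fifo, no gunzip involved):

import os
import subprocess

fifo = '/tmp/demo.fifo'
os.mkfifo(fifo, 0777)

cat = subprocess.Popen(['cat', fifo])   # reader: blocks until EOF on the FIFO
fd = os.open(fifo, os.O_WRONLY)         # writer end
os.write(fd, 'hello through the fifo\n')
os.close(fd)                            # without this close, cat would wait forever
cat.wait()                              # returns promptly once the writer is closed
os.unlink(fifo)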
I am a Python newbie writing a Python (2.7) script that needs to execute a number of external applications, one of which writes a lot of output to its stderr stream. What I am trying to figure out is a concise way (in Python) to get the last N lines of that subprocess' stderr output.
Currently, I am running that external application from my Python script like so:
p = subprocess.Popen('/path/to/external-app.sh', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
    print "ERROR: External app did not complete successfully (error code is " + str(p.returncode) + ")"
    print "Error/failure details: ", stderr
    status = False
else:
    status = True
I'd like to capture the last N lines of output from its stderr stream so that they can be written to a log file or emailed, etc.
N = 3 # for 3 lines of output
p = subprocess.Popen(['/path/to/external-app.sh'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
    print ("ERROR: External app did not complete successfully "
           "(error code is %s)" % p.returncode)
    print "Error/failure details: ", '\n'.join(stderr.splitlines()[-N:])
    status = False
else:
    status = True
If the whole output can't be stored in RAM then:
import sys
from collections import deque
from subprocess import Popen, PIPE
from threading import Thread
ON_POSIX = 'posix' in sys.builtin_module_names
def start_thread(func, *args):
    t = Thread(target=func, args=args)
    t.daemon = True
    t.start()
    return t

def consume(infile, output):
    for line in iter(infile.readline, ''):
        output(line)
    infile.close()

p = Popen(['cat', sys.argv[1]], stdout=PIPE, stderr=PIPE,
          bufsize=1, close_fds=ON_POSIX)

# preserve last N lines of stdout, print stderr immediately
N = 100
queue = deque(maxlen=N)
threads = [start_thread(consume, *args)
           for args in (p.stdout, queue.append), (p.stderr, sys.stdout.write)]
for t in threads:
    t.join()  # wait for IO completion

print ''.join(queue),  # print last N lines
retcode = p.wait()
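The question asks for the last N lines of stderr rather than stdout; swapping the two streams in the thread list gives exactly that (stderr kept in the deque, stdout echoed immediately):

threads = [start_thread(consume, *args)
           for args in (p.stderr, queue.append), (p.stdout, sys.stdout.write)]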