I have the following code that works exactly as intended:
import sys
from subprocess import Popen

process = Popen(
    ["/bin/bash"],
    stdin=sys.stdin,
    stdout=sys.stdout,
    stderr=sys.stderr,
)
process.wait()
I can interactively use bash, tab works, etc.
However, I want to control what I send to stdin, so I'd like the following to work:
import os
import sys
from subprocess import Popen, PIPE
from select import select

process = Popen(
    ["/bin/bash"],
    stdin=PIPE,
    stdout=sys.stdout,
    stderr=sys.stderr,
)

while True:
    if process.poll() is not None:
        break
    r, _, _ = select([sys.stdin], [], [])
    if sys.stdin in r:
        stdin = os.read(sys.stdin.fileno(), 1024)
        # Do w/e I want with stdin
        os.write(process.stdin.fileno(), stdin)

process.wait()
But the behavior just isn't the same. I've tried another approach (going through a pty):
import os
import sys
import tty
from subprocess import Popen
from select import select

master, slave = os.openpty()
stdin = sys.stdin.fileno()

try:
    tty.setraw(master)
    ttyname = os.ttyname(slave)

    def _preexec():
        os.setsid()
        open(ttyname, "r+")

    process = Popen(
        args=["/bin/bash"],
        preexec_fn=_preexec,
        stdin=slave,
        stdout=sys.stdout,
        stderr=sys.stderr,
        close_fds=True,
    )

    while True:
        if process.poll() is not None:
            break
        r, _, _ = select([sys.stdin], [], [])
        if sys.stdin in r:
            os.write(master, os.read(stdin, 1024))
finally:
    os.close(master)
    os.close(slave)
And the behavior is pretty close, except that Tab still doesn't work. Well, the Tab is properly sent, but my terminal doesn't show the completion, even though bash performed it. Arrow keys also print ^[[A instead of going through history. Any ideas?
All I needed was to set my sys.stdout to raw mode. I also found out three things:
- I need to restore the terminal settings on sys.stdout afterwards.
- subprocess.Popen has a start_new_session argument that does what my _preexec function was doing.
- select.select accepts a fourth argument: a timeout in seconds before giving up. It lets me avoid getting stuck in the select loop after bash exits.
Final code:
import os
import sys
import tty
import termios
import select
import subprocess

master, slave = os.openpty()
stdin = sys.stdin.fileno()

try:
    old_settings = termios.tcgetattr(sys.stdout)
    tty.setraw(sys.stdout)

    process = subprocess.Popen(
        args=["/bin/bash"],
        stdin=slave,
        stdout=sys.stdout,
        stderr=sys.stderr,
        close_fds=True,
        start_new_session=True,
    )

    while True:
        if process.poll() is not None:
            break
        r, _, _ = select.select([sys.stdin], [], [], 0.2)
        if sys.stdin in r:
            os.write(master, os.read(stdin, 1024))
finally:
    termios.tcsetattr(sys.stdout, termios.TCSADRAIN, old_settings)
    os.close(master)
    os.close(slave)
Related
I saw this example that launches bash through subprocess and achieves asynchronous read/write. However, I wonder if there is a way to preserve this behavior and still capture a keyboard interrupt.
Run interactive Bash with popen and a dedicated TTY Python
This is my version of the code. The issue is that the Python code is never interrupted: the KeyboardInterrupt handler is never reached.
import subprocess
import sys
import os
import signal
import atexit
import select
import termios
import tty
import pty

child_pid = None  # set once the child is spawned; checked by exit_protocol()

def execute(command):
    old_tty = termios.tcgetattr(sys.stdin)
    tty.setraw(sys.stdin.fileno())
    master_fd, slave_fd = pty.openpty()
    process = subprocess.Popen(command,
                               preexec_fn=os.setsid,
                               stdin=slave_fd,
                               stdout=slave_fd,
                               stderr=slave_fd,
                               universal_newlines=True)
    global child_pid
    child_pid = process.pid
    while process.poll() is None:
        r, w, e = select.select([sys.stdin, master_fd], [], [])
        if sys.stdin in r:
            d = os.read(sys.stdin.fileno(), 10240)
            os.write(master_fd, d)
        elif master_fd in r:
            o = os.read(master_fd, 10240)
            if o:
                os.write(sys.stdout.fileno(), o)
    output = process.communicate()[0]
    exitCode = process.returncode
    if exitCode == 0:
        return output
    else:
        # ProcessException is a custom exception defined elsewhere
        raise ProcessException(command, exitCode, output)

def exit_protocol():
    global child_pid
    if child_pid is None:
        pass
    else:
        print("Shutting Down bash", child_pid)

atexit.register(exit_protocol)

try:
    execute('bash')
except KeyboardInterrupt as exc:
    print("Clean up all bash's child jobs")
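One possible approach (my sketch, not part of the original post): once tty.setraw puts the terminal in raw mode, Ctrl+C no longer delivers SIGINT to the Python process; it simply arrives on stdin as the byte 0x03. A hypothetical helper can detect that byte in the read loop and re-raise KeyboardInterrupt so the except clause above gets a chance to run:

import os
import select
import sys

def forward_until_interrupt(master_fd):
    """Pump raw stdin into the child's pty. In raw mode Ctrl+C does not
    generate SIGINT; it shows up on stdin as the byte 0x03, so we translate
    it back into a KeyboardInterrupt ourselves."""
    while True:
        r, _, _ = select.select([sys.stdin], [], [], 0.2)
        if sys.stdin in r:
            d = os.read(sys.stdin.fileno(), 10240)
            if b"\x03" in d:  # the raw Ctrl+C byte
                raise KeyboardInterrupt
            os.write(master_fd, d)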
I have the following code and am trying to run it in IDLE on Linux.
import sys
from subprocess import PIPE, Popen
from threading import Thread

try:
    from Queue import Queue, Empty
except ImportError:
    from queue import Queue, Empty  # python 3.x

ON_POSIX = 'posix' in sys.builtin_module_names

def enqueue_output(out, queue):
    for line in iter(out.readline, b''):
        queue.put(line)
    out.close()

p = Popen(['youtube-dl', '-l', '-c', 'https://www.youtube.com/watch?v=utV1sdjr4PY'],
          stdout=PIPE, bufsize=1, close_fds=ON_POSIX)
q = Queue()
t = Thread(target=enqueue_output, args=(p.stdout, q))
t.daemon = True  # thread dies with the program
t.start()

# ... do other things here

# read line without blocking
while True:
    try:
        line = q.get_nowait()  # or q.get(timeout=.1)
    except Empty:
        pass  # print('no output yet')
    else:  # got line
        print(line)
But it always prints "no output yet".
Edit: I edited the code and it is working now. But I have another problem: the download percentage is updated in place on a single line, and the code only reads it once the line is complete.
OK, let's put the comments in an answer.
import sys, os
from subprocess import PIPE, Popen
from time import sleep
import pty

master, slave = pty.openpty()
stdout = os.fdopen(master)
p = Popen(['youtube-dl', '-l', '-c', 'https://www.youtube.com/watch?v=AYlb-7TXMxM'],
          shell=False, stdout=slave, stderr=slave, close_fds=True)

while True:
    # line = stdout.readline().rstrip() - would strip the newline
    line = stdout.readline()
    if line != '':
        sys.stdout.write("\r%s" % line)
        sys.stdout.flush()
    sleep(.1)
If you want a thread and a different while loop, I suggest wrapping it in a class and avoiding the queue. The output is "unbuffered" - thanks #FilipMalckzak
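If the goal is to see the \r-refreshed progress before any newline arrives, one option (a sketch along the same lines, not from the original answer) is to read raw chunks from the pty instead of whole lines:

import os
import pty
import subprocess
import sys

master, slave = pty.openpty()
p = subprocess.Popen(['youtube-dl', '-l', '-c', 'https://www.youtube.com/watch?v=AYlb-7TXMxM'],
                     stdout=slave, stderr=slave, close_fds=True)
os.close(slave)  # keep only the parent's end open

while True:
    try:
        chunk = os.read(master, 1024)  # returns as soon as bytes appear,
    except OSError:                    # even mid-line (\r progress updates);
        break                          # EIO here means the child exited
    if not chunk:
        break
    sys.stdout.buffer.write(chunk)
    sys.stdout.flush()

os.close(master)
p.wait()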
The task I'm trying to accomplish is to stream the output of a Ruby script and print it as it arrives. (NOTE: I don't want to print out everything at once.)
main.py
from subprocess import Popen, PIPE, STDOUT
import pty
import os

file_path = '/Users/luciano/Desktop/ruby_sleep.rb'
command = ' '.join(["ruby", file_path])

master, slave = pty.openpty()
proc = Popen(command, bufsize=0, shell=True, stdout=slave, stderr=slave, close_fds=True)
stdout = os.fdopen(master, 'r', 0)

while proc.poll() is None:
    data = stdout.readline()
    if data != "":
        print(data)
    else:
        break

print("This is never reached!")
ruby_sleep.rb
puts "hello"
sleep 2
puts "goodbye!"
Problem
Streaming the file works fine. The hello/goodbye output is printed with the 2-second delay, exactly as the script should work. The problem is that readline() hangs at the end and never returns, so I never reach the last print.
I know there are a lot of questions like this here on Stack Overflow, but none of them helped me solve the problem. I'm not that into the whole subprocess thing, so please give me a hands-on/concrete answer.
Regards
Edit: fixed unintended code (nothing to do with the actual error).
I assume you use pty due to reasons outlined in Q: Why not just use a pipe (popen())? (all other answers so far ignore your "NOTE: I don't want to print out everything at once").
pty is Linux only as said in the docs:
Because pseudo-terminal handling is highly platform dependent, there
is code to do it only for Linux. (The Linux code is supposed to work
on other platforms, but hasn’t been tested yet.)
It is unclear how well it works on other OSes.
You could try pexpect:
import sys
import pexpect
pexpect.run("ruby ruby_sleep.rb", logfile=sys.stdout)
Or stdbuf to enable line-buffering in non-interactive mode:
from subprocess import Popen, PIPE, STDOUT

proc = Popen(['stdbuf', '-oL', 'ruby', 'ruby_sleep.rb'],
             bufsize=1, stdout=PIPE, stderr=STDOUT, close_fds=True)
for line in iter(proc.stdout.readline, b''):
    print(line.decode(), end='')
proc.stdout.close()
proc.wait()
Or using pty from stdlib based on #Antti Haapala's answer:
#!/usr/bin/env python
import errno
import os
import pty
from subprocess import Popen, STDOUT

master_fd, slave_fd = pty.openpty()  # provide tty to enable
                                     # line-buffering on ruby's side
proc = Popen(['ruby', 'ruby_sleep.rb'],
             stdin=slave_fd, stdout=slave_fd, stderr=STDOUT, close_fds=True)
os.close(slave_fd)
try:
    while 1:
        try:
            data = os.read(master_fd, 512)
        except OSError as e:
            if e.errno != errno.EIO:
                raise
            break  # EIO means EOF on some systems
        else:
            if not data:  # EOF
                break
            print('got ' + repr(data))
finally:
    os.close(master_fd)
    if proc.poll() is None:
        proc.kill()
    proc.wait()

print("This is reached!")
All three code examples print 'hello' immediately (as soon as the first EOL is seen).
I leave the old, more complicated code example here because it may be referenced and discussed in other posts on SO.
Or using pty based on #Antti Haapala's answer:
import os
import pty
import select
from subprocess import Popen, STDOUT

master_fd, slave_fd = pty.openpty()  # provide tty to enable
                                     # line-buffering on ruby's side
proc = Popen(['ruby', 'ruby_sleep.rb'],
             stdout=slave_fd, stderr=STDOUT, close_fds=True)
timeout = .04  # seconds
while 1:
    ready, _, _ = select.select([master_fd], [], [], timeout)
    if ready:
        data = os.read(master_fd, 512)
        if not data:
            break
        print("got " + repr(data))
    elif proc.poll() is not None:  # select timeout
        assert not select.select([master_fd], [], [], 0)[0]  # detect race condition
        break  # proc exited
os.close(slave_fd)  # can't do it sooner: it leads to errno.EIO error
os.close(master_fd)
proc.wait()

print("This is reached!")
Not sure what is wrong with your code, but the following seems to work for me:
#!/usr/bin/python
from subprocess import Popen, PIPE
import threading

p = Popen('ls', stdout=PIPE)

class ReaderThread(threading.Thread):
    def __init__(self, stream):
        threading.Thread.__init__(self)
        self.stream = stream

    def run(self):
        while True:
            line = self.stream.readline()
            if len(line) == 0:
                break
            print(line.decode(), end='')

reader = ReaderThread(p.stdout)
reader.start()

# Wait until subprocess is done
p.wait()

# Wait until we've processed all output
reader.join()

print("Done!")
Note that I don't have Ruby installed and hence cannot check with your actual problem. Works fine with ls, though.
Basically what you are looking at here is a race condition between your proc.poll() and your readline(). Since the master file handle is never closed, if the code attempts a readline() after the Ruby process has finished writing, there will never be anything to read, but the pipe will never close either. The code only works if the shell process happens to exit before your code tries another readline().
Here is the timeline:

readline()
print output
poll()
readline()
print output (last line of real output)
poll() (returns None since the process is not done)
readline() (waits forever for more output)
(the process is done, but the output pipe is still open, and no poll ever happens for it)
The easy fix is to just use the subprocess module as the docs suggest, not in conjunction with openpty:
http://docs.python.org/library/subprocess.html
Here is a very similar problem for further study:
Using subprocess with select and pty hangs when capturing output
Try this:
proc = Popen(command, bufsize=0, shell=True, stdout=PIPE, close_fds=True)
for line in proc.stdout:
    print(line)

print("This is most certainly reached!")
As others have noted, readline() will block when reading data. It will even do so when your child process has died. I am not sure why this does not happen when executing ls as in the other answer, but maybe the Ruby interpreter detects that it is writing to a pipe and therefore does not close it automatically.
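If you need to guard against that block, one workaround (my sketch, not from the answer above) is to select on the raw file descriptor and use os.read, which returns b'' at EOF instead of hanging:

import os
import select

def read_available(fd, timeout=0.1):
    """Return bytes that are ready on fd without ever blocking.
    Returns None when nothing is ready yet, and b'' once the stream
    has reached EOF, so the caller can tell the two cases apart."""
    ready, _, _ = select.select([fd], [], [], timeout)
    if not ready:
        return None
    return os.read(fd, 4096)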
I want to run many processes in parallel, with the ability to read their stdout at any time. How should I do it? Do I need to run a thread for each subprocess.Popen() call, or what?
You can do it in a single thread.
Suppose you have a script that prints lines at random times:
#!/usr/bin/env python
#file: child.py
import os
import random
import sys
import time

for i in range(10):
    print("%2d %s %s" % (int(sys.argv[1]), os.getpid(), i))
    sys.stdout.flush()
    time.sleep(random.random())
If you'd like to collect the output as soon as it becomes available, you could use select on POSIX systems, as #zigg suggested:
#!/usr/bin/env python
from __future__ import print_function
from select import select
from subprocess import Popen, PIPE

# start several subprocesses
processes = [Popen(['./child.py', str(i)], stdout=PIPE,
                   bufsize=1, close_fds=True,
                   universal_newlines=True)
             for i in range(5)]

# read output
timeout = 0.1  # seconds
while processes:
    # remove finished processes from the list (O(N**2))
    for p in processes[:]:
        if p.poll() is not None:  # process ended
            print(p.stdout.read(), end='')  # read the rest
            p.stdout.close()
            processes.remove(p)

    # wait until there is something to read
    rlist = select([p.stdout for p in processes], [], [], timeout)[0]

    # read a line from each process that has output ready
    for f in rlist:
        print(f.readline(), end='')  # NOTE: it can block
A more portable solution (that should work on Windows, Linux, OSX) can use reader threads for each process, see Non-blocking read on a subprocess.PIPE in python.
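A minimal sketch of that thread-per-process approach (my adaptation, using the child.py script from above):

from queue import Empty, Queue
from subprocess import Popen, PIPE
from threading import Thread

def reader(pipe, queue, tag):
    # A blocking readline is fine here: each pipe gets its own thread.
    for line in iter(pipe.readline, b''):
        queue.put((tag, line))
    pipe.close()

procs = [Popen(['./child.py', str(i)], stdout=PIPE) for i in range(5)]
q = Queue()
for i, p in enumerate(procs):
    Thread(target=reader, args=(p.stdout, q, i), daemon=True).start()

# Drain lines as they arrive, from whichever child produced them first.
while any(p.poll() is None for p in procs) or not q.empty():
    try:
        tag, line = q.get(timeout=0.1)
    except Empty:
        continue
    print(tag, line.decode(), end='')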
Here's an os.pipe()-based solution that works on Unix and Windows:
#!/usr/bin/env python
from __future__ import print_function
import io
import os
import sys
from subprocess import Popen

ON_POSIX = 'posix' in sys.builtin_module_names

# create a pipe to get data
input_fd, output_fd = os.pipe()

# start several subprocesses
processes = [Popen([sys.executable, 'child.py', str(i)], stdout=output_fd,
                   close_fds=ON_POSIX)  # close input_fd in children
             for i in range(5)]
os.close(output_fd)  # close unused end of the pipe

# read output line by line as soon as it is available
with io.open(input_fd, 'r', buffering=1) as file:
    for line in file:
        print(line, end='')

for p in processes:
    p.wait()
You can also collect stdout from multiple subprocesses concurrently using twisted:
#!/usr/bin/env python
import sys
from twisted.internet import protocol, reactor

class ProcessProtocol(protocol.ProcessProtocol):
    def outReceived(self, data):
        print(data.decode(), end='')  # received chunk of stdout from child

    def processEnded(self, status):
        global nprocesses
        nprocesses -= 1
        if nprocesses == 0:  # all processes ended
            reactor.stop()

# start subprocesses
nprocesses = 5
for _ in range(nprocesses):
    reactor.spawnProcess(ProcessProtocol(), sys.executable,
                         args=[sys.executable, 'child.py'],
                         usePTY=True)  # can change how child buffers stdout

reactor.run()
See Using Processes in Twisted.
You don't need to run a thread for each process. You can peek at the stdout streams for each process without blocking on them, and only read from them if they have data available to read.
You do have to be careful not to accidentally block on them, though, if you're not intending to.
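On POSIX, that peek-then-read pattern could look like this (my sketch, reusing the child.py script from the earlier answer; select only promises that the next os.read won't block, it does not read anything itself):

import os
import select
from subprocess import Popen, PIPE

procs = [Popen(['./child.py', str(i)], stdout=PIPE) for i in range(5)]
fds = {p.stdout.fileno(): p for p in procs}

while fds:
    ready, _, _ = select.select(list(fds), [], [], 0.1)
    for fd in ready:
        data = os.read(fd, 4096)  # won't block: select said it's ready
        if data:
            os.write(1, data)     # fd 1 is our own stdout
        else:                     # b'' means this child closed its end
            del fds[fd]

for p in procs:
    p.wait()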
You can wait for process.poll() to finish, and run other stuff concurrently:
import time
import sys
from subprocess import Popen, PIPE

def ex1() -> None:
    command = 'sleep 2.1 && echo "happy friday"'
    proc = Popen(command, shell=True, stderr=PIPE, stdout=PIPE)
    while proc.poll() is None:
        # do stuff here
        print('waiting')
        time.sleep(0.05)

    out, _err = proc.communicate()
    print(out, file=sys.stderr)
    sys.stderr.flush()
    assert proc.poll() == 0

ex1()
I'm looking for a Python solution that will allow me to save the output of a command in a file without hiding it from the console.
FYI: I'm asking about tee (the Unix command-line utility), not the function of the same name from the Python itertools module.
Details
- A Python solution (not calling tee; it is not available under Windows)
- I do not need to provide any input to stdin of the called process
- I have no control over the called program. All I know is that it will output something to stdout and stderr and return with an exit code.
- Works when calling external programs (subprocess)
- Works for both stderr and stdout
- Able to differentiate between stdout and stderr, because I may want to display only one of them on the console, or I could try to output stderr using a different color - this means that stderr = subprocess.STDOUT will not work.
- Live (progressive) output - the process can run for a long time, and I'm not able to wait for it to finish.
- Python 3 compatible code (important)
References
Here are some incomplete solutions I found so far:
http://devlishgenius.blogspot.com/2008/10/logging-in-real-time-in-python.html (mkfifo works only on Unix)
http://blog.kagesenshi.org/2008/02/teeing-python-subprocesspopen-output.html (doesn't work at all)
Diagram http://blog.i18n.ro/wp-content/uploads/2010/06/Drawing_tee_py.png
Current code (second try)
#!/usr/bin/python
from __future__ import print_function
import sys, os, time, subprocess, io, threading

cmd = "python -E test_output.py"

from threading import Thread

class StreamThread(Thread):
    def __init__(self, buffer):
        Thread.__init__(self)
        self.buffer = buffer

    def run(self):
        while 1:
            line = self.buffer.readline()
            print(line, end="")
            sys.stdout.flush()
            if line == '':
                break

proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

stdoutThread = StreamThread(io.TextIOWrapper(proc.stdout))
stderrThread = StreamThread(io.TextIOWrapper(proc.stderr))
stdoutThread.start()
stderrThread.start()
proc.communicate()
stdoutThread.join()
stderrThread.join()

print("--done--")

#### test_output.py ####
#!/usr/bin/python
from __future__ import print_function
import sys, os, time

for i in range(0, 10):
    if i % 2:
        print("stderr %s" % i, file=sys.stderr)
    else:
        print("stdout %s" % i, file=sys.stdout)
    time.sleep(0.1)
Real output
stderr 1
stdout 0
stderr 3
stdout 2
stderr 5
stdout 4
stderr 7
stdout 6
stderr 9
stdout 8
--done--
The expected output was to have the lines ordered. Note: modifying the Popen to use only one PIPE is not allowed, because in real life I will want to do different things with stderr and stdout.
Also, even in the second case I was not able to obtain real-time-like output; in fact, all the results were received only when the process finished. By default, Popen should use no buffers (bufsize=0).
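A note from me (not in the original question): bufsize=0 only controls the parent's side of the pipe; the delay comes from stdio buffering inside the child, which switches from line buffering to block buffering when its stdout is not a tty. For a Python child you can sidestep that with -u:

import sys
from subprocess import Popen, PIPE

# -u makes the child's stdout/stderr unbuffered, so output crosses the
# pipe as soon as it is printed instead of when a block buffer fills up.
proc = Popen([sys.executable, '-u', '-E', 'test_output.py'],
             stdout=PIPE, stderr=PIPE)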
I see that this is a rather old post but just in case someone is still searching for a way to do this:
import subprocess

proc = subprocess.Popen(["ping", "localhost"],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        universal_newlines=True)  # text mode: readline() returns str

with open("logfile.txt", "w") as log_file:
    while proc.poll() is None:
        line = proc.stderr.readline()
        if line:
            print("err: " + line.strip())
            log_file.write(line)
        line = proc.stdout.readline()
        if line:
            print("out: " + line.strip())
            log_file.write(line)
If requiring Python 3.6 isn't an issue, there is now a way of doing this using asyncio. This method allows you to capture stdout and stderr separately, but still stream both to the tty, without using threads. Here's a rough outline:
import asyncio
import os
import sys

def isWindows():
    # helper assumed by the original snippet; defined here so the sketch runs
    return os.name == "nt"

class RunOutput:
    def __init__(self, returncode, stdout, stderr):
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr

async def _read_stream(stream, callback):
    while True:
        line = await stream.readline()
        if line:
            callback(line)
        else:
            break

async def _stream_subprocess(cmd, stdin=None, quiet=False, echo=False) -> RunOutput:
    if isWindows():
        platform_settings = {"env": os.environ}
    else:
        platform_settings = {"executable": "/bin/bash"}

    if echo:
        print(cmd)

    p = await asyncio.create_subprocess_shell(
        cmd,
        stdin=stdin,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
        **platform_settings
    )

    out = []
    err = []

    def tee(line, sink, pipe, label=""):
        line = line.decode("utf-8").rstrip()
        sink.append(line)
        if not quiet:
            print(label, line, file=pipe)

    await asyncio.wait(
        [
            _read_stream(p.stdout, lambda l: tee(l, out, sys.stdout)),
            _read_stream(p.stderr, lambda l: tee(l, err, sys.stderr, label="ERR:")),
        ]
    )

    return RunOutput(await p.wait(), out, err)

def run(cmd, stdin=None, quiet=False, echo=False) -> RunOutput:
    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(
        _stream_subprocess(cmd, stdin=stdin, quiet=quiet, echo=echo)
    )
    return result
The code above was based on this blog post: https://kevinmccarthy.org/2016/07/25/streaming-subprocess-stdin-and-stdout-with-asyncio-in-python/
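Usage would be something like this (my illustration, assuming the definitions above are in scope and a POSIX shell):

result = run('echo out && echo err >&2')  # streams both while running
print(result.returncode)  # 0
print(result.stdout)      # ['out']
print(result.stderr)      # ['err']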
This is a straightforward port of tee(1) to Python.
import sys

sinks = sys.argv[1:]
sinks = [open(sink, "w") for sink in sinks]
sinks.append(sys.stderr)
while True:
    input = sys.stdin.read(1024)
    if input:
        for sink in sinks:
            sink.write(input)
    else:
        break
I'm running on Linux right now but this ought to work on most platforms.
Now for the subprocess part, I don't know how you want to 'wire' the subprocess's stdin, stdout and stderr to your stdin, stdout, stderr and file sinks, but I know you can do this:
import subprocess

callee = subprocess.Popen(
    ["python", "-i"],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
)
Now you can access callee.stdin, callee.stdout and callee.stderr like normal files, enabling the above "solution" to work. If you want to get the callee.returncode, you'll need to make an extra call to callee.poll().
Be careful with writing to callee.stdin: if the process has already exited when you do that, an error may be raised (on Linux, I get IOError: [Errno 32] Broken pipe).
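A defensive write might look like this (my sketch; BrokenPipeError is the Python 3 spelling of that IOError):

try:
    callee.stdin.write(b"print('hi')\n")
    callee.stdin.flush()
except BrokenPipeError:
    callee.poll()  # the child already exited; collect its return code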
This is how it can be done:

import sys
from subprocess import Popen, PIPE

with open('log.log', 'w') as log:
    proc = Popen(["ping", "google.com"], stdout=PIPE, encoding='utf-8')
    while proc.poll() is None:
        text = proc.stdout.readline()
        log.write(text)
        sys.stdout.write(text)
If you don't want to interact with the process you can use the subprocess module just fine.
Example:
tester.py
import os
import sys

for file in os.listdir('.'):
    print(file)

sys.stderr.write("Oh noes, a shrubbery!")
sys.stderr.flush()
sys.stderr.close()
testing.py
import subprocess

p = subprocess.Popen(['python', 'tester.py'], stdout=subprocess.PIPE,
                     stdin=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
print(stdout, stderr)
In your situation you can simply write stdout/stderr to a file first. You can send arguments to your process with communicate as well, though I wasn't able to figure out how to continually interact with the subprocess.
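For one-shot input, communicate accepts an input argument (my example); it writes everything to stdin, closes it, and waits for the process to exit, which is also why it cannot be used for ongoing interaction:

import subprocess

p = subprocess.Popen(['python', 'tester.py'], stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(input=b'some input\n')  # one shot, then EOF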
On Linux, if you really need something like the tee(2) syscall, you can get it like this:
import os
import ctypes

ld = ctypes.CDLL(None, use_errno=True)
SPLICE_F_NONBLOCK = 0x02

def tee(fd_in, fd_out, length, flags=SPLICE_F_NONBLOCK):
    result = ld.tee(
        ctypes.c_int(fd_in),
        ctypes.c_int(fd_out),
        ctypes.c_size_t(length),
        ctypes.c_uint(flags),
    )
    if result == -1:
        errno = ctypes.get_errno()
        raise OSError(errno, os.strerror(errno))
    return result
To use this, you probably want to use Python 3.10 and something with os.splice (or use ctypes in the same way to get splice). See the tee(2) man page for an example.
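A usage sketch (my adaptation of the tee(2) man page example; assumes Linux, Python 3.10+ for os.splice, and that both stdin and stdout are pipes, e.g. cat file | python tee.py out.log | cat):

import os
import sys

log_fd = os.open(sys.argv[1], os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
stdin_fd = sys.stdin.fileno()
stdout_fd = sys.stdout.fileno()
try:
    while True:
        # Duplicate up to 64 KiB from the stdin pipe into the stdout pipe
        # without consuming it (flags=0 blocks until data is available).
        n = tee(stdin_fd, stdout_fd, 65536, flags=0)
        if n == 0:  # EOF on stdin
            break
        # Now drain the same bytes from the stdin pipe into the log file.
        left = n
        while left > 0:
            left -= os.splice(stdin_fd, log_fd, left)
finally:
    os.close(log_fd)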
My solution isn't elegant, but it works: you can use PowerShell to get access to tee under Windows.
import subprocess
import sys

cmd = ['powershell', 'ping', 'google.com', '|', 'tee', '-a', 'log.txt']
if 'darwin' in sys.platform:
    cmd.remove('powershell')

p = subprocess.Popen(cmd)
p.wait()