Problems in Python trying to open 2 terminals

I'm trying to create a Python script that opens 2 terminals simultaneously. It works fine on its own, but if I insert it into a loop, the 2nd terminal doesn't open and execution skips to the next statement. Here is the code that works fine:
from subprocess import Popen, PIPE, CREATE_NEW_CONSOLE
import multiprocessing as mp
import threading as th

def run(command):
    cmd = Popen(command, PIPE, creationflags=CREATE_NEW_CONSOLE)
    com, err = cmd.communicate()
    print(com, err)

if __name__ == "__main__":
    mp.freeze_support()
    command = f"python testFile.py"  # or command = f"testFile.exe"
    process = mp.Process(target=run, args=(command,))
    process.start()
    input("Wait ")
As I said, this works fine, but if I try to insert it into a loop, the 2nd terminal doesn't open:
from subprocess import Popen, PIPE, CREATE_NEW_CONSOLE
import multiprocessing as mp
import threading as th

def run(command):
    cmd = Popen(command, PIPE, creationflags=CREATE_NEW_CONSOLE)
    com, err = cmd.communicate()
    print(com, err)

while True:
    if __name__ == "__main__":
        mp.freeze_support()
        command = f"python testFile.py"  # or command = f"testFile.exe"
        process = mp.Process(target=run, args=(command,))
        process.start()
        input("Wait ")

Related

Distinguish between stdout and stderr in subprocess

I want to use subprocess to call other .py files and get their output.
Both streams print in the same color, so how can I distinguish between stdout and stderr in subprocess?
run.py

import time

def run():
    for i in range(3):
        print('Processing {}.'.format(i))
        time.sleep(1)
    print(1/0)

run()
main.py

import subprocess
import sys

def byte2str(b):
    return str(b, encoding='utf-8')

if __name__ == '__main__':
    cmd = [sys.executable, 'temp/run.py']
    p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    while p.poll() is None:
        line = p.stdout.readline()
        if line:
            print(byte2str(line), end='')
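The snippet above merges stderr into stdout, so the streams can't be told apart. One way to separate them (a sketch, not from the original post, reusing the temp/run.py path): give each stream its own pipe and tag lines with a label; one reader thread per stream keeps either pipe from filling up and deadlocking.

import subprocess
import sys
from threading import Thread

def pump(stream, label):
    # forward each line, prefixed so stdout and stderr are distinguishable
    for line in iter(stream.readline, b''):
        print(label, str(line, encoding='utf-8'), end='')
    stream.close()

if __name__ == '__main__':
    cmd = [sys.executable, 'temp/run.py']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    threads = [Thread(target=pump, args=(p.stdout, 'OUT:')),
               Thread(target=pump, args=(p.stderr, 'ERR:'))]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    p.wait()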

How do I reproduce `stdin=sys.stdin` with `stdin=PIPE`?

I have the following code that works exactly as intended:
import sys
from subprocess import Popen

process = Popen(
    ["/bin/bash"],
    stdin=sys.stdin,
    stdout=sys.stdout,
    stderr=sys.stderr,
)
process.wait()
I can interactively use bash, tab works, etc.
However, I want to control what I send to stdin, so I'd like the following to work:
import os
import sys
from subprocess import Popen, PIPE
from select import select

process = Popen(
    ["/bin/bash"],
    stdin=PIPE,
    stdout=sys.stdout,
    stderr=sys.stderr,
)
while True:
    if process.poll() is not None:
        break
    r, _, _ = select([sys.stdin], [], [])
    if sys.stdin in r:
        stdin = os.read(sys.stdin.fileno(), 1024)
        # Do w/e I want with stdin
        os.write(process.stdin.fileno(), stdin)
process.wait()
But the behavior just isn't the same. I've tried another approach (going through a pty):
import os
import sys
import tty
from subprocess import Popen
from select import select

master, slave = os.openpty()
stdin = sys.stdin.fileno()
try:
    tty.setraw(master)
    ttyname = os.ttyname(slave)

    def _preexec():
        os.setsid()
        open(ttyname, "r+")

    process = Popen(
        args=["/bin/bash"],
        preexec_fn=_preexec,
        stdin=slave,
        stdout=sys.stdout,
        stderr=sys.stderr,
        close_fds=True,
    )
    while True:
        if process.poll() is not None:
            break
        r, _, _ = select([sys.stdin], [], [])
        if sys.stdin in r:
            os.write(master, os.read(stdin, 1024))
finally:
    os.close(master)
    os.close(slave)
And the behavior is pretty close, except tab still doesn't work. Well, tab is properly sent, but my terminal doesn't show the completion even though bash performed it. Arrow keys also print ^[[A instead of walking through the history.
Any idea?
All I needed was setting my sys.stdout to raw. I also found out three things:
I need to restore the terminal settings on sys.stdout afterwards.
subprocess.Popen has a start_new_session argument that does what my _preexec function was doing.
select.select accepts a 4th argument: a timeout in seconds before giving up. It lets me avoid being stuck in the select loop after exiting.
Final code:
import os
import sys
import tty
import termios
import select
import subprocess

master, slave = os.openpty()
stdin = sys.stdin.fileno()
try:
    old_settings = termios.tcgetattr(sys.stdout)
    tty.setraw(sys.stdout)
    process = subprocess.Popen(
        args=["/bin/bash"],
        stdin=slave,
        stdout=sys.stdout,
        stderr=sys.stderr,
        close_fds=True,
        start_new_session=True,
    )
    while True:
        if process.poll() is not None:
            break
        r, _, _ = select.select([sys.stdin], [], [], 0.2)
        if sys.stdin in r:
            os.write(master, os.read(stdin, 1024))
finally:
    termios.tcsetattr(sys.stdout, termios.TCSADRAIN, old_settings)
    os.close(master)
    os.close(slave)

Cannot kill a subprocess created by Popen when printing process.stdout

I have created a script which should run a command and kill it after 15 seconds:
import logging
import subprocess
import time
import os
import sys
import signal

#cmd = "ping 192.168.1.1 -t"
cmd = "C:\\MyAPP\MyExe.exe -t 80 -I C:\MyApp\Temp -M Documents"
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
for line in proc.stdout:
    print(line.decode("utf-8"), end='')
time.sleep(15)
os.kill(proc.pid, signal.SIGTERM)
#proc.kill()  # Tried this too but no luck
This does not terminate my subprocess. However, if I comment out the part that logs to stdout, i.e.
for line in proc.stdout:
    print(line.decode("utf-8"), end='')
the subprocess is killed.
I have tried proc.kill() and CTRL_C_EVENT too, but no luck.
Any help would be highly appreciated; please bear in mind I'm a novice to Python.
To terminate the subprocess in 15 seconds while printing its output line by line:
#!/usr/bin/env python
from __future__ import print_function
from threading import Timer
from subprocess import Popen, PIPE, STDOUT

# start process
cmd = r"C:\MyAPP\MyExe.exe -t 80 -I C:\MyApp\Temp -M Documents"
process = Popen(cmd, stdout=PIPE, stderr=STDOUT,
                bufsize=1, universal_newlines=True)

# terminate process in 15 seconds
timer = Timer(15, terminate, args=[process])
timer.start()

# print output
for line in iter(process.stdout.readline, ''):
    print(line, end='')
process.stdout.close()
process.wait()  # wait for the child process to finish
timer.cancel()
Note that you don't need shell=True here. You could define terminate() as:
def terminate(process):
    if process.poll() is None:
        try:
            process.terminate()
        except EnvironmentError:
            pass  # ignore
If you want to kill the whole process tree then define terminate() as:
from subprocess import call

def terminate(process):
    if process.poll() is None:
        call('taskkill /F /T /PID ' + str(process.pid))
Use raw string literals for Windows paths (r"..."); otherwise you have to escape every backslash in the string literal.
Drop shell=True. It creates an additional process for no reason here.
universal_newlines=True enables text mode (on Python 3, bytes are decoded into Unicode text automatically, using the locale's preferred encoding).
iter(process.stdout.readline, '') is necessary for compatibility with Python 2 (otherwise the data may be printed with a delay due to the read-ahead buffer bug).
Use process.terminate() instead of process.send_signal(signal.SIGTERM) or os.kill(proc.pid, signal.SIGTERM).
taskkill allows you to kill a whole process tree on Windows; a simpler Python 3 alternative is sketched after these notes.
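On Python 3.3+, a simpler route (a sketch, not part of the original answer) is communicate() with a timeout: it raises TimeoutExpired, after which you can kill the process and drain what it already wrote. Unlike the loop above, this collects the output instead of streaming it line by line.

from subprocess import Popen, PIPE, STDOUT, TimeoutExpired

cmd = r"C:\MyAPP\MyExe.exe -t 80 -I C:\MyApp\Temp -M Documents"
process = Popen(cmd, stdout=PIPE, stderr=STDOUT, universal_newlines=True)
try:
    out, _ = process.communicate(timeout=15)  # wait at most 15 seconds
except TimeoutExpired:
    process.kill()                  # deadline passed: kill the process
    out, _ = process.communicate()  # drain whatever was already written
print(out, end='')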
The problem is that reading from stdout blocks. You need to either read the subprocess's output or run the timer on a separate thread.
from subprocess import Popen, PIPE
from threading import Thread
from time import sleep

class ProcKiller(Thread):
    def __init__(self, proc, time_limit):
        super(ProcKiller, self).__init__()
        self.proc = proc
        self.time_limit = time_limit

    def run(self):
        sleep(self.time_limit)
        self.proc.kill()

p = Popen('while true; do echo hi; sleep 1; done', shell=True)
t = ProcKiller(p, 5)
t.start()
p.communicate()
EDITED to reflect the changes suggested in the comments:
from subprocess import Popen, PIPE
from threading import Thread
from time import sleep
from signal import SIGTERM
import os

class ProcKiller(Thread):
    def __init__(self, proc, time_limit):
        super(ProcKiller, self).__init__()
        self.proc = proc
        self.time_limit = time_limit

    def run(self):
        sleep(self.time_limit)
        os.kill(self.proc.pid, SIGTERM)

p = Popen('while true; do echo hi; sleep 1; done', shell=True)
t = ProcKiller(p, 5)
t.start()
p.communicate()

Threaded subprocess read does not give any output

I have the following code and am trying to run it in IDLE on Linux.
import sys
from subprocess import PIPE, Popen
from threading import Thread
try:
    from Queue import Queue, Empty
except ImportError:
    from queue import Queue, Empty  # Python 3.x

ON_POSIX = 'posix' in sys.builtin_module_names

def enqueue_output(out, queue):
    for line in iter(out.readline, b''):
        queue.put(line)
    out.close()

p = Popen(['youtube-dl', '-l', '-c', 'https://www.youtube.com/watch?v=utV1sdjr4PY'],
          stdout=PIPE, bufsize=1, close_fds=ON_POSIX)
q = Queue()
t = Thread(target=enqueue_output, args=(p.stdout, q))
t.daemon = True  # thread dies with the program
t.start()

# ... do other things here

# read line without blocking
while True:
    try:
        line = q.get_nowait()  # or q.get(timeout=.1)
    except Empty:
        pass  # print('no output yet')
    else:  # got a line
        print(line)
But it always prints "no output yet".
Edit: I edited the code and it is working now, but I have another problem: the download percentage is updated in place on a single line, and the code reads it only after the line is complete.
OK, let's put the comments in an answer.
import sys, os
from subprocess import PIPE, Popen
from time import sleep
import pty

master, slave = pty.openpty()
stdout = os.fdopen(master)
p = Popen(['youtube-dl', '-l', '-c', 'https://www.youtube.com/watch?v=AYlb-7TXMxM'],
          shell=False, stdout=slave, stderr=slave, close_fds=True)
while True:
    # line = stdout.readline().rstrip() - would strip the newline
    line = stdout.readline()
    if line != b'':
        sys.stdout.write("\r%s" % line)
        sys.stdout.flush()
    sleep(.1)
If you want a thread and a different while loop, I suggest wrapping it in a class (a sketch follows below) and avoiding the queue. The output is "unbuffered" (thanks @FilipMalckzak).
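A sketch of that suggestion; the PtyReader class is a name introduced here, not something from the answer. It wraps the pty read loop in a daemon thread and uses raw os.read() calls, so carriage-return progress updates arrive without waiting for a newline:

import os
import pty
import sys
from subprocess import Popen
from threading import Thread

class PtyReader(Thread):
    """Run a command on a pty and echo its output from a background thread."""
    def __init__(self, cmd):
        super(PtyReader, self).__init__()
        self.daemon = True  # thread dies with the program
        self.master, slave = pty.openpty()
        self.proc = Popen(cmd, stdout=slave, stderr=slave, close_fds=True)
        os.close(slave)  # parent keeps only the master end

    def run(self):
        while True:
            try:
                # raw read: sees "\r" progress updates before a newline arrives
                data = os.read(self.master, 1024)
            except OSError:  # raised when the child closes the pty
                break
            if not data:
                break
            sys.stdout.write(data.decode(errors='replace'))
            sys.stdout.flush()

reader = PtyReader(['youtube-dl', '-l', '-c',
                    'https://www.youtube.com/watch?v=AYlb-7TXMxM'])
reader.start()
# ... do other things here while progress streams through ...
reader.proc.wait()
reader.join()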

Python subprocess in parallel

I want to run many processes in parallel, with the ability to read stdout at any time. How should I do it? Do I need to run a thread for each subprocess.Popen() call, or what?
You can do it in a single thread.
Suppose you have a script that prints lines at random times:
#!/usr/bin/env python
# file: child.py
import os
import random
import sys
import time

for i in range(10):
    print("%2d %s %s" % (int(sys.argv[1]), os.getpid(), i))
    sys.stdout.flush()
    time.sleep(random.random())
If you'd like to collect the output as soon as it becomes available, you could use select on POSIX systems, as @zigg suggested:
#!/usr/bin/env python
from __future__ import print_function
from select import select
from subprocess import Popen, PIPE

# start several subprocesses
processes = [Popen(['./child.py', str(i)], stdout=PIPE,
                   bufsize=1, close_fds=True,
                   universal_newlines=True)
             for i in range(5)]

# read output
timeout = 0.1  # seconds
while processes:
    # remove finished processes from the list (O(N**2))
    for p in processes[:]:
        if p.poll() is not None:  # process ended
            print(p.stdout.read(), end='')  # read the rest
            p.stdout.close()
            processes.remove(p)
    # wait until there is something to read
    rlist = select([p.stdout for p in processes], [], [], timeout)[0]
    # read a line from each process that has output ready
    for f in rlist:
        print(f.readline(), end='')  # NOTE: it can block
A more portable solution (that should work on Windows, Linux, OSX) can use reader threads for each process, see Non-blocking read on a subprocess.PIPE in python.
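A sketch of that reader-thread idea (not taken from the linked answer), assuming the same child.py helper as above: one thread per process pushes lines into a shared queue, and the main thread drains it as output arrives.

import sys
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread

def reader(proc, queue):
    for line in proc.stdout:
        queue.put((proc.pid, line))
    proc.stdout.close()
    queue.put((proc.pid, None))  # sentinel: this process is done

q = Queue()
procs = [Popen([sys.executable, 'child.py', str(i)], stdout=PIPE,
               universal_newlines=True) for i in range(5)]
for p in procs:
    Thread(target=reader, args=(p, q), daemon=True).start()

alive = len(procs)
while alive:
    pid, line = q.get()  # blocks until any process has output
    if line is None:
        alive -= 1
    else:
        print(pid, line, end='')
for p in procs:
    p.wait()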
Here's an os.pipe()-based solution that works on Unix and Windows:
#!/usr/bin/env python
from __future__ import print_function
import io
import os
import sys
from subprocess import Popen

ON_POSIX = 'posix' in sys.builtin_module_names

# create a pipe to get data
input_fd, output_fd = os.pipe()

# start several subprocesses
processes = [Popen([sys.executable, 'child.py', str(i)], stdout=output_fd,
                   close_fds=ON_POSIX)  # close input_fd in children
             for i in range(5)]
os.close(output_fd)  # close unused end of the pipe

# read output line by line as soon as it is available
with io.open(input_fd, 'r', buffering=1) as file:
    for line in file:
        print(line, end='')

for p in processes:
    p.wait()
You can also collect stdout from multiple subprocesses concurrently using Twisted:

#!/usr/bin/env python
import sys
from twisted.internet import protocol, reactor

class ProcessProtocol(protocol.ProcessProtocol):
    def outReceived(self, data):
        print(data.decode(), end='')  # received a chunk of stdout from the child

    def processEnded(self, status):
        global nprocesses
        nprocesses -= 1
        if nprocesses == 0:  # all processes ended
            reactor.stop()

# start subprocesses (child.py expects an index argument, see above)
nprocesses = 5
for i in range(nprocesses):
    reactor.spawnProcess(ProcessProtocol(), sys.executable,
                         args=[sys.executable, 'child.py', str(i)],
                         usePTY=True)  # a pty changes how the child buffers stdout
reactor.run()
See Using Processes in Twisted.
You don't need to run a thread for each process. You can peek at the stdout streams for each process without blocking on them, and only read from them if they have data available to read.
You do have to be careful not to accidentally block on them, though, if you're not intending to.
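For example, on POSIX a select() call with a zero timeout polls the pipes and returns immediately, so it never blocks (a sketch, again assuming the child.py helper from above):

import os
import sys
from select import select
from subprocess import Popen, PIPE

procs = [Popen([sys.executable, 'child.py', str(i)], stdout=PIPE)
         for i in range(5)]
streams = [p.stdout for p in procs]
while streams:
    # timeout 0 means "poll": return immediately instead of blocking
    ready, _, _ = select(streams, [], [], 0)
    for f in ready:
        data = os.read(f.fileno(), 1024)  # raw read: never blocks after select
        if data:
            sys.stdout.buffer.write(data)
        else:  # EOF: the child closed its stdout
            streams.remove(f)
    # ... do other work between polls ...
for p in procs:
    p.wait()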
You can loop on process.poll() until the process finishes, running other stuff concurrently in the meantime:
import time
import sys
from subprocess import Popen, PIPE

def ex1() -> None:
    command = 'sleep 2.1 && echo "happy friday"'
    proc = Popen(command, shell=True, stderr=PIPE, stdout=PIPE)
    while proc.poll() is None:
        # do stuff here
        print('waiting')
        time.sleep(0.05)
    out, _err = proc.communicate()
    print(out, file=sys.stderr)
    sys.stderr.flush()
    assert proc.poll() == 0

ex1()
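For completeness, on Python 3.7+ the same fan-out can be written without threads using asyncio; a sketch, again assuming the child.py helper from above:

import asyncio
import sys

async def run_child(i):
    proc = await asyncio.create_subprocess_exec(
        sys.executable, 'child.py', str(i),
        stdout=asyncio.subprocess.PIPE)
    async for line in proc.stdout:  # lines arrive as each child produces them
        print(line.decode(), end='')
    await proc.wait()

async def main():
    await asyncio.gather(*(run_child(i) for i in range(5)))

asyncio.run(main())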
