GUI freezes after os.system call - python

My problem is with os.system: until the format command finishes, the GUI freezes, and I can't find a way to prevent it.
import os, sys
from PyQt4 import QtCore, QtGui, uic

class ImageDialog(QtGui.QMainWindow):
    def __init__(self):
        QtGui.QDialog.__init__(self)
        self.ui = uic.loadUi("Recursos/main.ui", self)
        self.connect(self.ui.proteger_Button, QtCore.SIGNAL("clicked()"),
                     self, QtCore.SLOT("protec()"))

    #QtCore.pyqtSlot()
    def protec(self):
        self.USB = "G:"
        comando = "format %s /fs:ntfs /q /v:test /y" % (self.USB)
        os.system(comando)

if __name__ == '__main__':
    app = QtGui.QApplication(sys.argv)
    window = ImageDialog()
    window.show()
    sys.exit(app.exec_())

The simplest fix in your case is to add:
from threading import Thread
to your imports list, and then:
#QtCore.pyqtSlot()
def protec(self):
    self.USB = "G:"
    comando = "format %s /fs:ntfs /q /v:test /y" % (self.USB)
    t = Thread(target=lambda: os.system(comando))
    t.start()
This way the GUI thread will not get stuck waiting for the external process to finish.
You may also want to keep a reference to t and manage it in some way, so that the number of running threads cannot grow without limit if the external commands hang or run for a long time.
Note that os.system is not a good way to handle external processes; the subprocess module is recommended instead (see the sketch below).
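For illustration, here is a minimal sketch of the same slot using subprocess in a worker thread (same drive letter and format command as in the question; shell=True is kept only to mirror what os.system does, and the print is just a placeholder for whatever GUI update you want):

import subprocess
from threading import Thread

#QtCore.pyqtSlot()
def protec(self):
    self.USB = "G:"
    comando = "format %s /fs:ntfs /q /v:test /y" % (self.USB)

    def run_format():
        # blocks only this worker thread, not the GUI thread, and
        # (unlike os.system) returns the exit code directly
        exit_code = subprocess.call(comando, shell=True)
        print("format finished with exit code %s" % exit_code)

    Thread(target=run_format).start()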

Related

Can't kill robocopy subprocess from python

In my project on windows, I would like to start the mirroring of two directories.
I know that I can use python watchdog to do that, but I thought that using robocopy would be easier and faster.
To simplify the situation, let's assume I have a GUI with two buttons: start and stop mirroring.
Here below is a snippet with the relevant code:
class MirrorDemon(Thread):
    def __init__(self, src, dest):
        self.threading_flag = Event()
        self.src = src
        self.dest = dest
        self.opt = ' /MIR /MON:1 /MOT:1'
        self.mirror = None
        Thread.__init__(self)

    def run(self):
        command = 'robocopy {} {} {}'.format(str(self.src), str(self.dest), self.opt)
        self.p = subprocess.Popen(command.split(), shell=True)
        print(command)
        print('start robocopy with PID {}'.format(self.p.pid))

class Window(QMainWindow, Ui_MainWindow):
    def __init__(self, parent=None):
        super().__init__(parent)
        self.setupUi(self)

    def stop_demon(self):
        self.mirror.threading_flag.set()
        self.mirror.p.kill()
        self.mirror.join()
        print('stop demon')

    def start_demon(self):
        self.mirror = MirrorDemon(Path('./src'), Path('./dest'))
        self.mirror.setDaemon(True)
        self.mirror.start()
        print('start demon')

if __name__ == "__main__":
    app = QApplication(sys.argv)
    win = Window()
    win.show()
    sys.exit(app.exec())
When you click the start button, a PID is printed to the console; if I check this PID in the tasklist, it corresponds to a 'cmd.exe' process, and robocopy starts its job.
When you click stop, the cmd.exe process corresponding to that PID disappears, but the background robocopy continues!
I have tried several variations, but no luck.
Do you have any advice? Do you know if somebody has found a solution, or maybe implemented a mirroring watchdog?
Thanks.
Update
Following the suggestion of @Ulrich, setting shell=False actually does the trick and kills the robocopy process.
Thanks!
By changing this:
self.p = subprocess.Popen(command.split(), shell=True)
To this:
self.p = subprocess.Popen(command.split(), shell=False)
... you're ensuring that the process will be started directly from the current process, without starting a new shell process to start it in.
The PID you were getting back was for the shell process, and you can kill the shell without killing processes launched from that shell. By not starting it in a new shell, the PID you're getting back is the PID of the actual process and you'll be able to kill it as expected.
As the documentation states: "The only time you need to specify shell=True on Windows is when the command you wish to execute is built into the shell (e.g. dir or copy). You do not need shell=True to run a batch file or console-based executable."
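Putting it together, a minimal sketch of the corrected run() method (same command and options as in the question) would be:

def run(self):
    command = 'robocopy {} {} {}'.format(str(self.src), str(self.dest), self.opt)
    # No intermediate cmd.exe: self.p now tracks robocopy itself,
    # so self.p.pid is robocopy's PID and self.p.kill() stops the copy.
    self.p = subprocess.Popen(command.split(), shell=False)
    print('start robocopy with PID {}'.format(self.p.pid))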

python parallel processes return exit code

Let's see if I can make this clear... I'm a total Python beginner, so bear with me; this is my first Python program (though I'm familiar with basic scripting in a few other languages). I've been searching around for hours and I'm sure the answer to this is fairly simple, but I have yet to get it to work properly.
I'm writing code that should launch multiple command-line processes, and when each one finishes I want to update a cell in a QTableWidget. The table has a row for each process to run, and each row has a cell for the "status" of the process.
I can run this with no problem if I just use a for loop, spawning one process per row using subprocess.call(); however, that is linear and hangs the program for each loop cycle, and I would like to fire them all off at the same time. I've been digging through the subprocess documentation and am having a really hard time with it. I understand that I need to use subprocess.Popen (which will not block while the process runs, so I can spawn multiple instances). Where I run into trouble is getting the exit code back so that I can update my table without hanging the program - for instance, calling Popen.wait() and then checking returncode still blocks until the process completes. What I need is: "when the process completes, check the exit code and run a function that updates the QTableWidget."
I did find these two posts that seemed to get me going in the right direction, but didn't quite get me there:
Understanding Popen.communicate
How to get exit code when using Python subprocess communicate method?
Hopefully that made sense. Here's a simplified version of my code; I realize it is half-baked and half-broken, but I've been messing around with it for over an hour and have lost track of a few things...
import os, subprocess

ae_app = 'afterfx'
ae_path = os.path.join('C:/Program Files/Adobe/Adobe After Effects CC 2015/Support Files', ae_app + ".exe")
filename = "E:/Programming/Python/Archive tool/talk.jsx"
commandLine = 'afterfx -noui -r ' + filename
processList = [commandLine]
processes = []

for process in processList:
    f = os.tmpfile()
    aeProcess = subprocess.Popen(process, executable=ae_path, stdout=f)
    processes.append((aeProcess, f))

for aeProcess, f in processes:
    # this is where I need serious help...
    aeProcess.wait()
    print "the line is:"
    print aeProcess.returncode
Spencer
You mentioned PyQt, so you can use PyQt's QProcess class.
def start_processes(self, process_list):
    for cmd, args in process_list:
        proc = QProcess(self)
        proc.finished.connect(self.process_finished)
        proc.start(cmd, args)

def process_finished(self, code, status):
    # Do something
UPDATE: Added a fully working example. It works properly for both PyQt4 and PyQt5 (to switch, comment the PyQt5 import and uncomment the PyQt4 import in main.py).
sleeper.py
import sys
from time import sleep
from datetime import datetime as dt
if __name__ == '__main__':
    x = int(sys.argv[1])
    started = dt.now().time()
    sleep(x)
    ended = dt.now().time()
    print('Slept for: {}, started: {}, ended: {}'.format(x, started, ended))
    sys.exit(0)
main.py
import sys
from PyQt5 import QtCore, QtWidgets
# from PyQt4 import QtCore, QtGui as QtWidgets
class App(QtWidgets.QMainWindow):
    cmd = r'python.exe C:\_work\test\sleeper.py {}'

    def __init__(self):
        super(App, self).__init__()
        self.setGeometry(200, 200, 500, 300)
        self.button = QtWidgets.QPushButton('Start processes', self)
        self.button.move(20, 20)
        self.editor = QtWidgets.QTextEdit(self)
        self.editor.setGeometry(20, 60, 460, 200)
        self.button.clicked.connect(self.start_proc)

    def start_proc(self):
        for x in range(5):
            proc = QtCore.QProcess(self)
            proc.finished.connect(self.finished)
            proc.start(self.cmd.format(x))

    def finished(self, code, status):
        self.editor.append(str(self.sender().readAllStandardOutput()))

if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    gui = App()
    gui.show()
    app.exec_()

PySide browser in a separate process

I am trying to write a simple parser that checks some web pages; if the content on these pages has changed, the script sends the URL to a headless WebKit browser (run via the PySide bindings to Qt) which takes a screenshot. I want this browser always running in a separate background process, waiting for a URL to appear in a queue. As soon as a URL arrives, it takes the screenshot, saves it and then returns to waiting.
I tried to implement this behavior with the following code (I cut the parser part):
import multiprocessing
import threading
import sys
from datetime import datetime
from PySide import QtGui, QtWebKit, QtCore

class Browser(QtWebKit.QWebPage):
    def __init__(self, queue_in, queue_out):
        self.app = QtGui.QApplication(sys.argv)
        QtWebKit.QWebPage.__init__(self)
        self.queue_out = queue_out
        self.queue_in = queue_in
        self.setViewportSize(QtCore.QSize(900, 900))
        self.mainFrame().setScrollBarPolicy(QtCore.Qt.Horizontal, QtCore.Qt.ScrollBarAlwaysOff)
        self.mainFrame().setScrollBarPolicy(QtCore.Qt.Vertical, QtCore.Qt.ScrollBarAlwaysOff)
        self.mainFrame().loadFinished.connect(self._makeScreenshot)
        self.makeScreenshotOf()

    def makeScreenshotOf(self):
        self.mainFrame().setUrl(QtCore.QUrl.fromEncoded(self.queue_in.get()))

    def _makeScreenshot(self):
        image = QtGui.QImage(self.viewportSize(), QtGui.QImage.Format_ARGB32)
        painter = QtGui.QPainter(image)
        self.mainFrame().render(painter)
        painter.end()
        file_name = datetime.now().strftime("%Y-%m-%d %H-%M-%S-%f") + ".png"
        image.save(file_name)
        self.queue_out.put(file_name)
        self.makeScreenshotOf()

if __name__ == "__main__":
    multiprocessing.set_start_method('spawn')
    queue_in = multiprocessing.Queue()
    queue_out = multiprocessing.Queue()
    t = threading.Thread(target=Browser, args=(queue_in, queue_out))
    t.start()
    queue_in.put(url)  # url comes from the parser part that was cut out
The problem is that, on the first run, the process successfully stays on hold, waiting for a URL to appear in the queue, but as soon as it gets a URL the process just stops, ignoring the Qt connection
self.mainFrame().loadFinished.connect(self._makeScreenshot)
The thing is, if I inherit directly from Process:
class Browser(multiprocessing.Process):
    def __init__(self, queue_in, queue_out):
        multiprocessing.Process.__init__(self)
        self.queue_out = queue_out
        self.queue_in = queue_in
        self.app = QtGui.QApplication(sys.argv)
        self.browser = QtWebKit.QWebPage()
        ...

if __name__ == "__main__":
    queue_in = multiprocessing.Queue()
    queue_out = multiprocessing.Queue()
    b = Browser(queue_in, queue_out)
Then the connection is not ignored and everything works perfectly, but as a side effect, self.queue_in.get() invoked in the Browser process also blocks the main process (if the queue is empty).
Questions:
Why is the Qt connection not working in the first case but working in the second?
Why, in the second case, does queue.get() block the main process? How can I prevent this?
Queue.get() blocks if the queue is empty. Use get_nowait(), which will raise an exception if there's nothing there.
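For example, a non-blocking poll of the queue could look like this (a minimal sketch; on Python 3 the exception lives in the queue module, on Python 2 it is Queue.Empty):

import queue  # only for the Empty exception; the queue itself is still multiprocessing.Queue

def try_get_url(queue_in):
    try:
        return queue_in.get_nowait()
    except queue.Empty:
        return None  # nothing waiting yet, so don't block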
Well, it seems the call to app.exec_() was essential. Everything works now, except that I get a warning that the QApplication was not created in the main() thread.
Although everything works anyway, I decided to move it into the main process:
app = QtGui.QApplication(sys.argv)
browser = Browser(queue_in, queue_out)
app.exec_()
and run the parser part in a separate process.
Update
Figured out how to run QApplication in a separate process:
class QtApp(QtGui.QApplication):
    """
    docstring
    """
    def __init__(self, args, url_queue, filename_queue):
        QtGui.QApplication.__init__(self, args)
        browser = Browser(url_queue, filename_queue)
        self.exec_()

browser_process = multiprocessing.Process(target=QtApp,
                                          args=(sys.argv, url_queue, filename_queue))

Is using python threading with gtk, and cmd ok?

I have a script I wrote by modifying the helloworld examples for gtk and cmd.
#!/usr/bin/python
import cmd
from gi.repository import Gtk
import threading

class GtkInterface(object):
    def __init__(self):
        win = Gtk.Window()
        win.connect("delete-event", Gtk.main_quit)
        win.show_all()
        self.window = win

    def create_button(self):
        self.button = Gtk.Button(label="Click Here")
        self.button.connect("clicked", self.on_button_clicked)
        self.window.add(self.button)
        self.window.show_all()

    def on_button_clicked(self, widget):
        print 'something happened'
        return

class HelloWorld(cmd.Cmd):
    #Simple command processor example.
    prompt = '>'

    def __init__(self, gtk_object):
        cmd.Cmd.__init__(self)
        # or cmd.Cmd.__init__(self)
        self.gtk_object = gtk_object

    def do_greet(self, line):
        print "hello"

    def do_setbutton(self, line):
        self.gtk_object.create_button()

    def do_exit(self, line):
        return True

gtk_o = GtkInterface()
hello = HelloWorld(gtk_o)

def worker(num):
    """thread worker function"""
    #print 'Worker: %s' % num
    hello.cmdloop()
    return

def worker2(num):
    Gtk.main()

threads = []
t = threading.Thread(target=worker, args=(1,))
threads.append(t)
t2 = threading.Thread(target=worker2, args=(2,))
threads.append(t2)

if __name__ == '__main__':
    #HelloWorld().cmdloop()
    #Gtk.main()
    t.start()
    t2.start()
This works. What I'd like to know is: is this OK? Are there issues to look out for? This is my first time trying this, so there are a lot of unknowns for me. I understand that both cmd and gtk are blocking. The Gtk.main and cmd loops work flawlessly so far; I'm just being cautious.
It's my first time using threading too. When cmd gets the setbutton command, the button is created. When the button is clicked, 'something happened' is printed, and the command line continues as if nothing out of the ordinary happened. I was really surprised at how seamlessly it all works, yet I am still a little worried.
GTK has its own threading library, and I think you need to be careful with complex applications: http://faq.pygtk.org/index.py?req=show&file=faq20.006.htp - for instance, when you have threads that update your GUI independently of the main thread.
In your example you do have a threaded application, although in fact your entire GTK application is running in a single thread, so you are OK.
GTK+ is not thread safe and there are a few problems with the example which may cause instability. The example loads GTK+ in the main thread, runs the GTK+ main loop in a different thread and creates GTK+ widgets in yet another thread. All the GTK+ API calls should occur in the main thread with other threads communicating back to the main GUI thread by adding idle or timer callbacks. Have a read through the PyGObject threading wiki [1].
https://wiki.gnome.org/Projects/PyGObject/Threading
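To illustrate that pattern, here is a minimal sketch (not taken from the posts above; the widget and function names are placeholders) of a worker thread handing its result back to the GTK+ main loop with GLib.idle_add, which is the idle-callback approach the wiki describes. On older PyGObject versions you may also need GObject.threads_init() before starting threads.

import threading
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GLib

def background_work(label):
    # Runs in a worker thread: do the slow work here, but never
    # touch GTK+ widgets directly from this thread.
    result = "work finished"
    # Hand the widget update back to the GTK+ main loop.
    GLib.idle_add(label.set_text, result)

win = Gtk.Window()
win.connect("delete-event", Gtk.main_quit)
label = Gtk.Label(label="working...")
win.add(label)
win.show_all()

t = threading.Thread(target=background_work, args=(label,))
t.daemon = True
t.start()
Gtk.main()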

Redirecting processes from multiprocessing to separate wx.TextCtrl

I have four text boxes and up to four processes that I will be starting using the multiprocessing module. I can get the processes to execute properly, but I would really like to redirect all the output of each process to a different wx.TextCtrl so I can see what is going on throughout the solution process. I have done this successfully with a single thread and sys.stdout redirection as in
http://www.velocityreviews.com/forums/t515815-wxpython-redirect-the-stdout-to-a-textctrl.html
but a similar idea doesn't work with processes. Can someone hack together a simple solution for this problem? I can't imagine I'm the only person to have run into it.
As I understand it, in wxPython land you normally want to run processes from a thread. So you communicate from the processes to the thread and from the thread back to wxPython. I would use some kind of naming scheme to associate each potential process with a text control (maybe 1-4?) and pass that back to the thread which will use wx.CallAfter or wx.PostEvent to tell wx to update.
You might be able to use a simple Python socket server to accomplish this too. Post a message to the server with a header that says which text control it belongs to. In the wx part you could have a wx.Timer check the socket server for new messages and update as appropriate.
For those that are interested, here is a working code snippet that does what my original question asked. It works a treat.
This code creates a thread that wraps a process. Within the run() function of the process, stdout and stderr are redirected to a pipe using a small redirection class. Pipes can be pickled, which is a prerequisite for using them within the run() function of the process.
The thread then just sits and pulls output from the pipe as long as there is output waiting. The text pulled from the pipe is written to a wx.TextCtrl using wx.CallAfter. Note that this is a non-blocking call; in fact, all the code here is non-blocking, which makes for a responsive GUI. Note also the flush() method in the redirection class, which is needed so that stderr can be redirected as well.
Note: one thing to be cautious of is that if you read and write through the pipe at too high a throughput, the GUI will lock up. As long as the output is reasonably slow, there is no problem.
import wx
import sys
import time
from multiprocessing import Pipe, Process
from threading import Thread

class RedirectText2Pipe(object):
    def __init__(self, pipe_inlet):
        self.pipe_inlet = pipe_inlet

    def write(self, string):
        self.pipe_inlet.send(string)

    def flush(self):
        return None

class Run1(Process):
    def __init__(self, pipe_inlet):
        Process.__init__(self)
        self.pipe_std = pipe_inlet

    def run(self):
        redir = RedirectText2Pipe(self.pipe_std)
        sys.stdout = redir
        sys.stderr = redir
        for i in range(100):
            time.sleep(0.01)
            print i, 'Hi'

class RedirectedWorkerThread(Thread):
    """Worker Thread Class."""
    def __init__(self, stdout_target):
        """Init Worker Thread Class."""
        Thread.__init__(self)
        self.stdout_target_ = stdout_target

    def run(self):
        """
        In this function, actually run the process and pull any output from the
        pipes while the process runs
        """
        pipe_outlet, pipe_inlet = Pipe(duplex=False)
        p = Run1(pipe_inlet)
        p.daemon = True
        p.start()
        while p.is_alive():
            # Collect all display output from process
            while pipe_outlet.poll():
                wx.CallAfter(self.stdout_target_.WriteText, pipe_outlet.recv())

class MainFrame(wx.Frame):
    def __init__(self):
        wx.Frame.__init__(self, None)
        self.txt1 = wx.TextCtrl(self, style=wx.TE_MULTILINE | wx.TE_READONLY)
        self.txt2 = wx.TextCtrl(self, style=wx.TE_MULTILINE | wx.TE_READONLY)
        self.txt3 = wx.TextCtrl(self, style=wx.TE_MULTILINE | wx.TE_READONLY)
        self.btn = wx.Button(self, label='Run')
        self.btn.Bind(wx.EVT_BUTTON, self.OnStart)
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.AddMany([(self.txt1, 1, wx.EXPAND), (self.txt2, 1, wx.EXPAND), (self.txt3, 1, wx.EXPAND), self.btn])
        self.SetSizer(sizer)

    def OnStart(self, event):
        t1 = RedirectedWorkerThread(self.txt1)
        t1.daemon = True
        t1.start()
        t2 = RedirectedWorkerThread(self.txt2)
        t2.daemon = True
        t2.start()
        t3 = RedirectedWorkerThread(self.txt3)
        t3.daemon = True
        t3.start()

if __name__ == '__main__':
    app = wx.App(False)
    frame = MainFrame()
    frame.Show(True)
    app.MainLoop()
