PySide: Threads destroyed while still running - python

I'm having a problem with QThreads in Python.
I would like to start multiple QThreads when I press the Run button.
But the program outputs the following error:
"QThread: Destroyed while thread is still running"
I don't know what is wrong with my code.
Any help would be appreciated.
Here is my code:
# -*- coding: utf-8 -*-
from PySide import QtCore, QtGui
from Ui_MainWindow import Ui_MainWindow
from queue import Queue
import sys, re, random
import time, random, re, urllib.request
from urllib.parse import urljoin

class Worker(QtCore.QThread):
    def __init__(self, threadID, name, q, delay):
        QtCore.QThread.__init__(self)
        self.threadID = threadID
        self.name = name
        self.q = q
        self.delay = delay
        self._running = False

    def run(self):
        self._running = True
        print("start - %s" % self.name)
        while self._running:
            req = self.request(self.name, self.q, self.delay)

    def stop(self, wait=False):
        print(self.name)
        self._running = False

    def request(self, threadName, q1, delay):
        while not self.q.empty():
            time.sleep(delay)
            q = q1.get()
            print("%s: %s %s %s" % (threadName, time.ctime(time.time()), q, delay))
            if self.q.empty():
                print("queue empty")
                self.stop()

class MainWindow(QtGui.QMainWindow):
    def __init__(self, parent=None):
        QtGui.QMainWindow.__init__(self, parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.backend = Queue()
        self.connect(self.ui.actionStart, QtCore.SIGNAL('triggered()'), self.start)

    def start(self):
        try:
            f1 = open('./payload/backend.log')
        except FileNotFoundError as e:
            return
        threadList = ["Thread-1", "Thread-2", "Thread-3", "Thread-4", "Thread-5"]
        self.url = "http://test.com"
        self.threads = []
        threadID = 1
        for payload in f1.read().splitlines():
            full_url = urljoin(self.url, payload)
            self.backend.put(full_url)
        for tName in threadList:
            ran_int = random.randint(1, 2)
            downloader = Worker(threadID, tName, self.backend, ran_int)
            downloader.start()
            self.threads.append(downloader)

if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())
log
QThread: Destroyed while thread is still running
QThread: Destroyed while thread is still running
QThread: Destroyed while thread is still running
QThread: Destroyed while thread is still running

What you are trying to do is built into Qt: a QThreadPool.
I would advise you to read the docs and use it instead.
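For reference, a minimal sketch of the QThreadPool approach (the DownloadTask class and the URLs are illustrative, not taken from the question):

from PySide import QtCore

class DownloadTask(QtCore.QRunnable):
    """One unit of work; the pool runs it on one of its own threads."""
    def __init__(self, url):
        QtCore.QRunnable.__init__(self)
        self.url = url

    def run(self):
        # Do the blocking work here (e.g. the urllib request from the question).
        print("processing %s" % self.url)

pool = QtCore.QThreadPool.globalInstance()
for url in ["http://test.com/a", "http://test.com/b"]:
    pool.start(DownloadTask(url))   # the pool queues and schedules the runnables
pool.waitForDone()                  # block until every queued runnable has finished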
If you really want to use QThread:
You should not subclass QThread. Instead you should subclass the basic QObject to create your worker and use the moveToThread method:
from PyQt5.QtCore import QObject, QThread, pyqtSignal  # PyQt naming; with PySide use QtCore.Signal

class WorkerSignals(QObject):
    finished = pyqtSignal()

class Worker(QObject):
    def __init__(self):
        super().__init__()
        self.signal = WorkerSignals()

    def run(self):
        # Your stuff
        print('running')
        self.signal.finished.emit()
Then somewhere else:
thread = QThread()
worker = Worker(...)
worker.moveToThread(thread)
thread.started.connect(worker.run)
worker.signal.finished.connect(thread.quit)
worker.signal.finished.connect(worker.deleteLater)
thread.finished.connect(thread.deleteLater)
thread.start()
The solution is a rough translation of this one in C++:
https://mayaposch.wordpress.com/2011/11/01/how-to-really-truly-use-qthreads-the-full-explanation/
Hope this helps!

The problem is caused by the way you are using the queue.
All the threads start and begin their tasks normally, up until the queue becomes empty. At that point, the first thread to finish terminates, but the other four threads are left waiting for an item to be returned from the queue, which never comes.
This is because you use get with no arguments, which will block indefinitely until an item becomes available. Instead, you should use get_nowait, and also call stop() unconditionally at the end of request():
from queue import Queue, Empty
...

class Worker(QtCore.QThread):
    ...
    def request(self, threadName, q1, delay):
        while not q1.empty():
            time.sleep(delay)
            try:
                q = q1.get_nowait()
            except Empty:
                break
            else:
                print("%s: %s %s %s" % (threadName, time.ctime(time.time()), q, delay))
        print("queue empty")
        self.stop()

I believe that you need to call self.threads.append(downloader) before downloader.start() so that the thread doesn't go out of scope and get garbage collected.
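A minimal sketch of that reordering, reusing the names from the question's start() method:

for tName in threadList:
    ran_int = random.randint(1, 2)
    downloader = Worker(threadID, tName, self.backend, ran_int)
    self.threads.append(downloader)   # keep a reference before starting the thread
    downloader.start()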

Related

python thread not exiting with atexit

Here is my script. When I run it in a shell it just hangs indefinitely whereas I would expect it to terminate cleanly.
import logging
from logging import StreamHandler
import pymsteams
import queue
import threading
import atexit

class TeamsHandler(StreamHandler):
    def __init__(self, channel_url):
        super().__init__()
        self.channel_url = channel_url
        self.queue = queue.Queue()
        self.thread = threading.Thread(target=self._worker)
        self.thread.start()
        atexit.register(self.queue.put, None)

    def _worker(self):
        while True:
            record = self.queue.get()
            if record is None:
                break
            msg = self.format(record)
            print(msg)

    def emit(self, record):
        # enqueue the record to log and return control to the caller
        self.queue.put(record)

if __name__ == "__main__":
    my_logger = logging.getLogger('TestLogging')
    my_logger.setLevel(logging.DEBUG)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)
    my_logger.addHandler(console_handler)
    CHANNEL_ID = "not_used_anyway"
    teamshandler = TeamsHandler(CHANNEL_ID)
    teamshandler.setFormatter(logging.Formatter('%(levelname)s %(message)s'))
    teamshandler.setLevel(logging.DEBUG)
    my_logger.addHandler(teamshandler)
    for i in range(1, 2):
        my_logger.error(f"this is an error [{i}]")
        my_logger.info(f"this is an info [{i}]")
The None record that should be sent by the atexit handler never arrives, so the thread stays open forever.
How can I make sure that the program exits cleanly, modifying only the TeamsHandler?
I got something working, have a look:
import queue
import threading

class Worker:
    def __init__(self):
        self.queue = queue.Queue()
        threading.Thread(target=self._worker).start()

    def _worker(self):
        print("starting thread")
        while True:
            record = self.queue.get()
            if record is None:
                print("exiting")
                break
            print(f"Got message: {record}")

    def emit(self, record):
        self.queue.put(record)

class Wrapper:
    def __init__(self):
        self._worker = Worker()

    def __del__(self):
        print("Wrapper is being deleted")
        self._worker.emit(None)

    def emit(self, record):
        self._worker.emit(record)

def main():
    worker = Wrapper()
    worker.emit("foo")
    worker.emit("bar")
    print("main exits")

if __name__ == "__main__":
    main()
The point here is that when main exits, worker (which is an instance of Wrapper) goes out of scope, its __del__ method is called, and it sends a stop message to the real worker object.
The results of running this code ("Got message" lines can be in different places, of course):
starting thread
main exits
Wrapper is being deleted
Got message: foo
Got message: bar
exiting
As pointed out by avysk, the problem is likely that atexit handlers fire too late, after the waiting for non-daemon threads is already (supposed to be) done, which leads to a deadlock.
If I were you, I'd just add a call like TeamsHandler.finish() at the end of the if __name__ == '__main__' block, and modify TeamsHandler along these lines (untested):
_queues = []

class TeamsHandler(StreamHandler):
    def __init__(self, channel_url):
        super().__init__()
        self.channel_url = channel_url
        self.queue = queue.Queue()
        self.thread = threading.Thread(target=self._worker)
        self.thread.start()
        _queues.append(self.queue)

    def _worker(self):
        while True:
            record = self.queue.get()
            if record is None:
                break
            msg = self.format(record)
            print(msg)

    def emit(self, record):
        # enqueue the record to log and return control to the caller
        self.queue.put(record)

    @staticmethod
    def finish():
        for q in _queues:
            q.put(None)
        del _queues[:]
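The corresponding call at the end of the question's if __name__ == '__main__': block would then look something like this:

for i in range(1, 2):
    my_logger.error(f"this is an error [{i}]")
    my_logger.info(f"this is an info [{i}]")
TeamsHandler.finish()  # tell every worker thread to drain its queue and exit before interpreter shutdown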

Showing QInputDialog and other gui objects from various threads

I have a websocket server running in Python, and for every new connection a new thread is created to serve its requests.
In the main thread [GUI thread] I am initializing QApplication([]). The use case is: while processing a request, I want to wait for and get a text response from the user through a QInputDialog.
Whenever I run it, there is an event loop running but the GUI is not shown, because GUI elements can only be displayed from the GUI thread itself.
I have tried various approaches using signals/slots and PyPubSub, but have been unable to achieve what is required. Please suggest some way to get this use case done; pseudo-code is much appreciated.
The code below shows some of the examples I tried. I am using a thread in these examples because, as I mentioned, each request from a connection is executed on the thread assigned to that connection, and the text from the QInputDialog is required by that thread.
Thanks in advance.
Below is the websockets server code, which serves requests by calling the server_extentions function. I have to show a QInputDialog every time I get an incoming request.
import websockets
import asyncio
from PyQt5.QtWidgets import QInputDialog, QApplication

app = QApplication([])

async def server_extentions(websocket, path):
    try:
        while True:
            request = await websocket.recv()
            # this is where i need to show input dialog.
            text, ok = QInputDialog.getText(None, "Incoming message", request)
            if ok:
                response = text
            else:
                response = "NO REPLY"
            await websocket.send(response)
    except websockets.ConnectionClosed as exp:
        print("connection closed.")

start_server = websockets.serve(server_extentions, '127.0.0.1', 5588)
loop = asyncio.get_event_loop()
try:
    loop.run_until_complete(start_server)
    loop.run_forever()
finally:
    loop.run_until_complete(loop.shutdown_asyncgens())
    loop.close()
----edit-----
Below is the general idea of what I tried using PyPubSub.
import threading
import pubsub.pub
from PyQt5.QtWidgets import QInputDialog, QApplication

class MainThread:
    def __init__(self):
        self.app = QApplication([])
        pubsub.pub.subscribe(self.pub_callback, "lala")

    def pub_callback(self):
        print("this is Main thread's pub callback.")
        QInputDialog.getText(None, "main-thread", "lala call back : ")

    def start_thread(self):
        self.th = threading.Thread(target=self.thread_proc)
        self.th.start()

    def thread_proc(self):
        pubsub.pub.sendMessage("lala")

m = MainThread()
m.start_thread()
-----edit 2 -------
Below is something I tried with a pyqtSignal. [See the comment in the code: how do I call a function on the main thread?]
import threading
from PyQt5.QtWidgets import QInputDialog, QApplication
from PyQt5.QtCore import pyqtSignal, QObject, QThread
class TextDialog(QObject):
sig = pyqtSignal(str)
def __init__(self):
QObject.__init__(self)
def get_text(self):
print("class Thread2, showing QInputDialog.")
text, ok = QInputDialog.getText(None, "Lala", "give me some text : ")
if ok:
self.sig.emit(text)
return
self.sig.emit("NO TEXT")
return
class Thread1:
def thread_proc(self):
td = TextDialog()
td.sig.connect(self.get_text_callback)
td.moveToThread(m.main_thread)
# here i dont understand how to invoke MainThread's show_dialog with main thread. [GUI Thread]
#m.show_dialog(td)
def get_text_callback(self, txt):
print("this is get_text_callback, input : " + str(txt))
class MainThread:
def __init__(self):
self.app = QApplication([])
self.main_thread = QThread.currentThread()
def main_proc(self):
th1 = Thread1()
th = threading.Thread(target=th1.thread_proc)
th.start()
def show_dialog(self, text_dialog: TextDialog):
print("got a call to MainThread's show_dialog.")
text_dialog.get_text()
m = MainThread()
m.main_proc()
exit()
For this type of application it is better to implement the worker-thread approach. The main idea is to implement QObjects, move them to a new thread, and invoke their slots asynchronously (through QEvents, pyqtSignals, QTimer.singleShot(...), QMetaObject::invokeMethod(...), etc.) so that the tasks are executed in the thread where the QObject lives.
import threading
from functools import partial
from PyQt5 import QtCore, QtWidgets
class TextDialog(QtCore.QObject):
sig = QtCore.pyqtSignal(str)
@QtCore.pyqtSlot()
def get_text(self):
print("class Thread2, showing QInputDialog.")
text, ok = QtWidgets.QInputDialog.getText(
None, "Lala", "give me some text : "
)
if ok:
self.sig.emit(text)
return
self.sig.emit("NO TEXT")
return
class Worker1(QtCore.QObject):
@QtCore.pyqtSlot(QtCore.QObject)
def thread_proc(self, manager):
print(
"current: {}- main: {}".format(
threading.current_thread(), threading.main_thread()
)
)
manager.td.sig.connect(self.get_text_callback)
QtCore.QTimer.singleShot(0, manager.show_dialog)
@QtCore.pyqtSlot(str)
def get_text_callback(self, txt):
print(
"current: {}- main: {}".format(
threading.current_thread(), threading.main_thread()
)
)
print("this is get_text_callback, input : %s" % (txt,))
class Manager(QtCore.QObject):
def __init__(self, parent=None):
super().__init__(parent)
self.td = TextDialog()
@QtCore.pyqtSlot()
def show_dialog(self):
print("got a call to MainThread's show_dialog.")
self.td.get_text()
class Application:
def __init__(self):
print(
"current: {}- main: {}".format(
threading.current_thread(), threading.main_thread()
)
)
self.app = QtWidgets.QApplication([])
# By default the application quits when the last window is closed; here that
# would happen as soon as the QInputDialog is dismissed, before
# get_text_callback gets a chance to run, so that behavior is disabled.
self.app.setQuitOnLastWindowClosed(False)
self.manager = Manager()
def main_proc(self):
#
self.thread = QtCore.QThread()
self.thread.start()
self.worker = Worker1()
# move the worker so that it lives in the thread that handles the QThread
self.worker.moveToThread(self.thread)
# calling function asynchronously
# will cause the function to run on the worker's thread
QtCore.QTimer.singleShot(
0, partial(self.worker.thread_proc, self.manager)
)
def run(self):
return self.app.exec_()
if __name__ == "__main__":
import sys
m = Application()
m.main_proc()
ret = m.run()
sys.exit(ret)

How To Abort Threads that Pull Items from a Queue Using Ctrl+C In Python

I've implemented a threaded application in Python. At runtime I want to catch Ctrl+C (SIGINT) and exit the program. To do that, I've registered a function called exit_gracefully, which also takes care of stopping the threads in a more controlled way. However, it does not seem to work: the handler is never called.
Here's the example I'm working with:
import Queue
import threading
import signal
import sys
import time
queue = Queue.Queue()
workers = list()
def callback(id, item):
print("{}: {}".format(id, item))
time.sleep(1)
def exit_gracefully(signum, frame):
print("Ctrl+C was pressed. Shutting threads down ...")
print("Stopping workers ...")
for worker in workers:
worker.stop()
sys.exit(1)
class ThreadedTask(threading.Thread):
def __init__(self, id, queue, callbacks):
threading.Thread.__init__(self)
self._stop_event = threading.Event()
self.id = str(id)
self.queue = queue
self.callbacks = callbacks
self._stopped = False
def run(self):
while not self.stopped():
item = self.queue.get()
for callback in self.callbacks:
callback(self.id, item)
self.queue.task_done()
def stop(self):
self._stop_event.set()
self._stopped = True
def stopped(self):
return self._stop_event.is_set() or self._stopped
def main(input_file, thread_count, callbacks):
print("Initializing queue ...")
queue = Queue.Queue()
print("Parsing '{}' ...".format(input_file))
with open(input_file) as f:
for line in f:
queue.put(line.replace("\n", ""))
print("Initializing {} threads ...".format(thread_count))
for id in range(thread_count):
worker = ThreadedTask(id, queue, callbacks)
worker.setDaemon(True)
workers.append(worker)
print("Starting {} threads ...".format(thread_count))
for worker in workers:
worker.start()
queue.join()
if __name__ == '__main__':
signal.signal(signal.SIGINT, exit_gracefully)
print("Starting main ...")
input_file = "list.txt"
thread_count = 10
callbacks = [
callback
]
main(input_file, thread_count, callbacks)
If you want to try the example above you may generate some test-data first:
seq 1 10000 > list.txt
Any help is appreciated!
Here's a solution that seems to work.
One issue is that Queue.get() will ignore SIGINT unless a timeout is set. That's documented here: https://bugs.python.org/issue1360.
Another issue is that Queue.join() also seems to ignore SIGINT. I worked around that by polling the queue in a loop to see if it's empty.
These issues appear to have been fixed in Python 3.
I also added a shared event that's used in the SIGINT handler to tell all the threads to shut down.
import Queue
import signal
import sys
import threading
import time
def callback(id, item):
print '{}: {}'.format(id, item)
time.sleep(1)
class ThreadedTask(threading.Thread):
def __init__(self, id, queue, run_event, callbacks):
super(ThreadedTask, self).__init__()
self.id = id
self.queue = queue
self.run_event = run_event
self.callbacks = callbacks
def run(self):
queue = self.queue
while not self.run_event.is_set():
try:
item = queue.get(timeout=0.1)
except Queue.Empty:
pass
else:
for callback in self.callbacks:
callback(self.id, item)
queue.task_done()
def main():
queue = Queue.Queue()
run_event = threading.Event()
workers = []
def stop():
run_event.set()
for worker in workers:
# Allow worker threads to shut down completely
worker.join()
def sigint_handler(signum, frame):
print '\nShutting down...'
stop()
sys.exit(0)
signal.signal(signal.SIGINT, sigint_handler)
callbacks = [callback]
for id in range(1, 11):
worker = ThreadedTask(id, queue, run_event, callbacks)
workers.append(worker)
for worker in workers:
worker.start()
with open('list.txt') as fp:
for line in fp:
line = line.strip()
queue.put(line)
while not queue.empty():
time.sleep(0.1)
# Update: Added this to gracefully shut down threads after all
# items are consumed from the queue.
stop()
if __name__ == '__main__':
main()

Threaded upload with timeout in Python and PySide

I am looking for a design pattern based on threading.Thread, multiprocessing or Queue to upload a list of items with a timeout. The thread allows the GUI to remain responsive. If the connection hangs, then the timeout should trigger and the program should gracefully exit.
The example below works, but the GUI remains blocked. How could this be improved to allow for uploading of the list, manual canceling of the process, timeout of the upload process plus a non-blocking GUI?
from PySide.QtGui import *
from PySide.QtCore import *
import sys
import time
import threading
class UploadWindow(QDialog):
def __init__(self, parent=None):
super(UploadWindow, self).__init__(parent)
self.uploadBtn = QPushButton('Upload')
mainLayout = QVBoxLayout()
mainLayout.addWidget(self.uploadBtn)
self.uploadBtn.clicked.connect(self.do_upload)
self.progressDialog = QProgressDialog(self)
self.progressDialog.canceled.connect(self.cancelDownload)
self.progressDialog.hide()
self.setLayout(mainLayout)
self.show()
self.raise_()
def do_upload(self):
self.uploadBtn.setEnabled(False)
self.progressDialog.setMaximum(10)
self.progressDialog.show()
self.upload_thread = UploadThread(self)
self.upload_thread.start()
self.upload_thread_stopped = False
#List of items to upload
for i in range(10):
self.upload_thread = UploadThread(i)
self.upload_thread.start()
self.upload_thread.join(5)
self.progressDialog.setValue(i)
if self.upload_thread_stopped:
break
self.progressDialog.hide()
self.uploadBtn.setEnabled(True)
def cancelDownload(self):
self.upload_thread_stopped = True
class UploadThread(threading.Thread):
def __init__(self, i):
super(UploadThread, self).__init__()
self.i = i
self.setDaemon(True)
def run(self):
time.sleep(0.25) #simulate upload time
print self.i
if __name__ == '__main__':
app = QApplication(sys.argv)
w = UploadWindow()
sys.exit(app.exec_())
The GUI is not responsive because you do all the work in do_upload, never going back to the main loop.
Also, you call Thread.join(), which blocks everything until the thread is done (see https://docs.python.org/2/library/threading.html#threading.Thread.join).
You should use PySide.QtCore.QThread to take advantage of signals and slots.
Here is a nice example in C++. I implemented it in Python3.4 with PyQt here, but you should be able to use it with PySide too.
You might also want to look at PySide.QtCore.QProcess, to avoid using threads.
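A minimal sketch of that signal/slot approach with PySide (the UploadWorker name and the connections shown in the comments are illustrative, not a definitive implementation):

import time
from PySide import QtCore

class UploadWorker(QtCore.QObject):
    progress = QtCore.Signal(int)   # emitted after each uploaded item
    finished = QtCore.Signal()

    def __init__(self, items):
        super(UploadWorker, self).__init__()
        self.items = items
        self._cancelled = False

    def cancel(self):
        self._cancelled = True

    def run(self):
        for i, item in enumerate(self.items, 1):
            if self._cancelled:
                break
            time.sleep(0.25)          # simulate uploading one item
            self.progress.emit(i)
        self.finished.emit()

# Wiring it up in UploadWindow.do_upload() could look roughly like this:
#   self.thread = QtCore.QThread()
#   self.worker = UploadWorker(range(10))
#   self.worker.moveToThread(self.thread)
#   self.thread.started.connect(self.worker.run)
#   self.worker.progress.connect(self.progressDialog.setValue)
#   self.worker.finished.connect(self.thread.quit)
#   # DirectConnection so the cancel flag is set immediately from the GUI thread
#   self.progressDialog.canceled.connect(self.worker.cancel, QtCore.Qt.DirectConnection)
#   self.thread.start()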
Here, I put some code together that does what I think you want.
For a real project, be sure to keep better track of what has been uploaded, and/or use something safer than .terminate() to stop the thread on demand.
import sys
from PySide import QtGui, QtCore
import time
class MySigObj(QtCore.QObject):
strSig = QtCore.Signal(str)
tupSig = QtCore.Signal(tuple)
class UploadThread(QtCore.QThread):
def __init__(self, parent=None):
super(UploadThread, self).__init__(parent)
self.endNow = False
self.fileName = None
self.sig = MySigObj()
self.fileNames = []
self.uploaded = []
@QtCore.Slot(str)
def setFileNames(self, t):
self.fileNames = list(t)
def run(self):
while self.fileNames:
print(self.fileNames)
time.sleep(2)
name = self.fileNames.pop(0)
s = 'uploaded file: ' + name + '\n'
print(s)
self.sig.strSig.emit(s)
self.uploaded.append(name)
if len(self.fileNames) == 0:
self.sig.strSig.emit("files transmitted: %s" % str(self.uploaded))
else:
time.sleep(1) #if the thread started but no list, wait 1 sec every cycle thru
#that way this thread should release the Python GIL (Global Interpreter Lock)
class ULoadWin(QtGui.QWidget):
def __init__(self, parent=None):
super(ULoadWin, self).__init__(parent)
self.upThread = UploadThread()
self.sig = MySigObj()
self.sig.tupSig.connect(self.upThread.setFileNames)
self.upThread.sig.strSig.connect(self.txtMsgAppend)
self.sig.tupSig.connect(self.upThread.setFileNames)
self.layout = QtGui.QVBoxLayout()
self.stButton = QtGui.QPushButton("Start")
self.stButton.clicked.connect(self.uploadItems)
self.stpButton = QtGui.QPushButton("Stop")
self.stpButton.clicked.connect(self.killThread)
self.testButton = QtGui.QPushButton("write txt\n not(?) blocked \nbelow")
self.testButton.setMinimumHeight(28)
self.testButton.clicked.connect(self.tstBlking)
self.lbl = QtGui.QTextEdit()
self.lbl.setMinimumHeight(325)
self.lbl.setMinimumWidth(290)
self.layout.addWidget(self.stButton)
self.layout.addWidget(self.stpButton)
self.layout.addWidget(self.testButton)
self.layout.addWidget(self.lbl)
self.setLayout(self.layout)
self.l = ['a', 'list', 'of_files', 'we', 'will_pretend_to_upload', 'st', 'uploading']
self.upThread.start()
def tstBlking(self):
self.lbl.append("txt not(?) blocked")
def uploadItems(self):
t = tuple(self.l)
self.sig.tupSig.emit(t)
self.upThread.start()
def killThread(self):
self.upThread.terminate()
time.sleep(.01)
self.upThread = UploadThread()
@QtCore.Slot(str)
def txtMsgAppend(self, txt):
self.lbl.append(txt + " | ")
if __name__ == '__main__':
app=QtGui.QApplication(sys.argv)
widg=ULoadWin()
widg.show()
sys.exit(app.exec_())
In the end I solved this problem by adapting the approach outlined here, which I find to be an elegant solution. The class I created is shown below:
class UploadThread(threading.Thread):
    # input_q and result_q are Queue.Queue objects
    def __init__(self, input_q, result_q):
        super(UploadThread, self).__init__()
        self.input_q = input_q
        self.result_q = result_q
        self.stoprequest = threading.Event()  # threadsafe flag

    def run(self):
        '''Runs indefinitely until self.join() is called.
        As soon as items are placed in the input_q, the thread will process them until the input_q is emptied.
        '''
        while not self.stoprequest.isSet():  # stoprequest can be set from the main gui
            try:
                # Queue.get with timeout to allow checking self.stoprequest
                num = self.input_q.get(True, 0.1)  # when the queue is empty it waits 100ms before raising the Queue.Empty error
                print 'In thread, processing', num
                time.sleep(0.5)
                self.result_q.put(True)  # Indicate to the main thread that an item was successfully processed.
            except Queue.Empty as e:
                continue

    def join(self, timeout=None):
        self.stoprequest.set()
        super(UploadThread, self).join(timeout)
In the main thread, the upload thread is created and the input_q is loaded with the items to upload. A QTimer is created to regularly check the progress of the upload by looking at what has been placed into the result_q; it also updates the progress bar. If no progress has been made within a timeout, this indicates the upload connection has failed.
An advantage of using Queue.Queue objects for communicating between threads is that multiple threads sharing the same input and result queues can be created.
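A rough sketch of that main-thread side (the method and attribute names here, such as check_progress and last_progress_time, are illustrative rather than the original code):

def do_upload(self):
    self.input_q = Queue.Queue()
    self.result_q = Queue.Queue()
    for item in range(10):                       # the list of items to upload
        self.input_q.put(item)
    self.upload_thread = UploadThread(self.input_q, self.result_q)
    self.upload_thread.start()
    self.progressDialog.setMaximum(10)
    self.progressDialog.show()
    self.uploaded_count = 0
    self.last_progress_time = time.time()
    self.check_timer = QTimer(self)
    self.check_timer.timeout.connect(self.check_progress)
    self.check_timer.start(200)                  # poll every 200 ms; the GUI stays responsive

def check_progress(self):
    while not self.result_q.empty():             # drain everything reported so far
        self.result_q.get()
        self.uploaded_count += 1
        self.last_progress_time = time.time()
    self.progressDialog.setValue(self.uploaded_count)
    if self.uploaded_count >= 10:                # all items uploaded
        self.check_timer.stop()
        self.upload_thread.join()
    elif time.time() - self.last_progress_time > 5:   # timeout: no progress for 5 s
        self.check_timer.stop()
        self.upload_thread.join()                # sets stoprequest and waits for the thread
        self.progressDialog.cancel()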

python multiprocessing/threading cleanup

I have a Python tool that basically has this kind of setup:
main process (P1) -> spawns a process (P2) that starts a tcp connection
-> spawns a thread (T1) that starts a loop to receive
messages that are sent from P2 to P1 via a Queue (Q1)
server process (P2) -> spawns two threads (T2 and T3) that start loops to
receive messages that are sent from P1 to P2 via Queues (Q2 and Q3)
The problem I'm having is that when I stop my program (with Ctrl+C), it doesn't quit. The server process is ended, but the main process just hangs there and I have to kill it.
The thread loop functions all look the same:
def _loop(self):
    while self.running:
        res = self.Q1.get()
        if res is None:
            break
        self._handle_msg(res)
All threads are started as daemon:
t = Thread(target=self._loop)
t.setDaemon(True)
t.start()
In my main process, I use atexit, to perform clean-up tasks:
atexit.register(self.on_exit)
Those clean-up tasks are essentially the following:
1) set self.running in P1 to False and send None to Q1, so that the thread T1 finishes
self.running = False
self.Q1.put(None)
2) send a message to P2 via Q2 to inform this process that it is ending
self.Q2.put("stop")
3) In P2, react to the "stop" message and do what we did in P1
self.running = False
self.Q2.put(None)
self.Q3.put(None)
That is it and in my understanding, that should make everything shut down nicely, but it doesn't.
The main code of P1 also contains the following endless loop, because otherwise the program would end prematurely:
while running:
sleep(1)
Maybe that has something to do with the problem, but I cannot see why it should.
So what did I do wrong? Does my setup have major design flaws? Did I forget to shut down something?
EDIT
Ok, I modified my code and managed to make it shut down correctly most of the time. Unfortunately, every now and then it still gets stuck.
I managed to write a small working example of my code. To demonstrate what happens, you simply need to start the script and then use Ctrl + C to stop it. The issue usually appears if you press Ctrl + C as soon as possible after starting the tool.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import signal
import sys
import logging
from multiprocessing import Process, Queue
from threading import Thread
from time import sleep
logger = logging.getLogger("mepy-client")
class SocketClientProtocol(object):
def __init__(self, q_in, q_out, q_binary):
self.q_in = q_in
self.q_out = q_out
self.q_binary = q_binary
self.running = True
t = Thread(target=self._loop)
#t.setDaemon(True)
t.start()
t = Thread(target=self._loop_binary)
#t.setDaemon(True)
t.start()
def _loop(self):
print "start of loop 2"
while self.running:
res = self.q_in.get()
if res is None:
break
self._handle_msg(res)
print "end of loop 2"
def _loop_binary(self):
print "start of loop 3"
while self.running:
res = self.q_binary.get()
if res is None:
break
self._handle_binary(res)
print "end of loop 3"
def _handle_msg(self, msg):
msg_type = msg[0]
if msg_type == "stop2":
print "STOP RECEIVED"
self.running = False
self.q_in.put(None)
self.q_binary.put(None)
def _put_msg(self, msg):
self.q_out.put(msg)
def _handle_binary(self, data):
pass
def handle_element(self):
self._put_msg(["something"])
def run_twisted(q_in, q_out, q_binary):
s = SocketClientProtocol(q_in, q_out, q_binary)
while s.running:
sleep(2)
s.handle_element()
class MediatorSender(object):
def __init__(self):
self.q_in = None
self.q_out = None
self.q_binary = None
self.p = None
self.running = False
def start(self):
if self.running:
return
self.running = True
self.q_in = Queue()
self.q_out = Queue()
self.q_binary = Queue()
print "!!!!START"
self.p = Process(target=run_twisted, args=(self.q_in, self.q_out, self.q_binary))
self.p.start()
t = Thread(target=self._loop)
#t.setDaemon(True)
t.start()
def stop(self):
print "!!!!STOP"
if not self.running:
return
print "STOP2"
self.running = False
self.q_out.put(None)
self.q_in.put(["stop2"])
#self.q_in.put(None)
#self.q_binary.put(None)
try:
if self.p and self.p.is_alive():
self.p.terminate()
except:
pass
def _loop(self):
print "start of loop 1"
while self.running:
res = self.q_out.get()
if res is None:
break
self._handle_msg(res)
print "end of loop 1"
def _handle_msg(self, msg):
self._put_msg(msg)
def _put_msg(self, msg):
self.q_in.put(msg)
def _put_binary(self, msg):
self.q_binary.put(msg)
def send_chunk(self, chunk):
self._put_binary(chunk)
running = True
def signal_handler(signal, frame):
global running
if running:
running = False
ms.stop()
else:
sys.exit(0)
if __name__ == "__main__":
signal.signal(signal.SIGINT, signal_handler)
ms = MediatorSender()
ms.start()
for i in range(100):
ms.send_chunk("some chunk of data")
while running:
sleep(1)
I think you're corrupting your multiprocessing.Queue by calling p.terminate() on the child process. The docs have a warning about this:
Warning: If this method is used when the associated process is using a
pipe or queue then the pipe or queue is liable to become corrupted and
may become unusable by other process. Similarly, if the process has
acquired a lock or semaphore etc. then terminating it is liable to
cause other processes to deadlock.
In some cases, it looks like p is terminating before your MediatorSender._loop method can consume the sentinel you loaded into it to let it know that it should exit.
Also, you're installing a signal handler that expects to work in the main process only, but SIGINT is actually received by both the parent and the child processes, which means signal_handler gets called in both. That could result in ms.stop getting called twice, due to a race condition in the way you handle setting ms.running to False.
I would recommend just exploiting the fact that both processes receive the SIGINT, and having both the parent and the child handle KeyboardInterrupt directly. That way, each shuts itself down cleanly, rather than having the parent terminate the child. The following code demonstrates that, and in my testing it never hung. I've simplified your code in a few places, but functionally it's exactly the same:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from multiprocessing import Process, Queue
from threading import Thread
from time import sleep
logger = logging.getLogger("mepy-client")
class SocketClientProtocol(object):
def __init__(self, q_in, q_out, q_binary):
self.q_in = q_in
self.q_out = q_out
self.q_binary = q_binary
t = Thread(target=self._loop)
t.start()
t = Thread(target=self._loop_binary)
t.start()
def _loop(self):
print("start of loop 2")
for res in iter(self.q_in.get, None):
self._handle_msg(res)
print("end of loop 2")
def _loop_binary(self):
print("start of loop 3")
for res in iter(self.q_binary.get, None):
self._handle_binary(res)
print("end of loop 3")
def _handle_msg(self, msg):
msg_type = msg[0]
if msg_type == "stop2":
self.q_in.put(None)
self.q_binary.put(None)
def _put_msg(self, msg):
self.q_out.put(msg)
def stop(self):
print("STOP RECEIVED")
self.q_in.put(None)
self.q_binary.put(None)
def _handle_binary(self, data):
pass
def handle_element(self):
self._put_msg(["something"])
def run_twisted(q_in, q_out, q_binary):
s = SocketClientProtocol(q_in, q_out, q_binary)
try:
while True:
sleep(2)
s.handle_element()
except KeyboardInterrupt:
s.stop()
class MediatorSender(object):
def __init__(self):
self.q_in = None
self.q_out = None
self.q_binary = None
self.p = None
self.running = False
def start(self):
if self.running:
return
self.running = True
self.q_in = Queue()
self.q_out = Queue()
self.q_binary = Queue()
print("!!!!START")
self.p = Process(target=run_twisted,
args=(self.q_in, self.q_out, self.q_binary))
self.p.start()
self.loop = Thread(target=self._loop)
self.loop.start()
def stop(self):
print("!!!!STOP")
if not self.running:
return
print("STOP2")
self.running = False
self.q_out.put(None)
def _loop(self):
print("start of loop 1")
for res in iter(self.q_out.get, None):
self._handle_msg(res)
print("end of loop 1")
def _handle_msg(self, msg):
self._put_msg(msg)
def _put_msg(self, msg):
self.q_in.put(msg)
def _put_binary(self, msg):
self.q_binary.put(msg)
def send_chunk(self, chunk):
self._put_binary(chunk)
if __name__ == "__main__":
ms = MediatorSender()
try:
ms.start()
for i in range(100):
ms.send_chunk("some chunk of data")
# You actually have to join w/ a timeout in a loop on
# Python 2.7. If you just call join(), SIGINT won't be
# received by the main process, and the program will
# hang. This is a bug, and is fixed in Python 3.x.
while True:
ms.loop.join()
except KeyboardInterrupt:
ms.stop()
Edit:
If you prefer to use a signal handler rather than catching KeyboardInterrupt, you just need to make sure the child process uses its own signal handler, rather than inheriting the parent's:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import signal
import logging
from functools import partial
from multiprocessing import Process, Queue
from threading import Thread
from time import sleep
logger = logging.getLogger("mepy-client")
class SocketClientProtocol(object):
def __init__(self, q_in, q_out, q_binary):
self.q_in = q_in
self.q_out = q_out
self.q_binary = q_binary
self.running = True
t = Thread(target=self._loop)
t.start()
t = Thread(target=self._loop_binary)
t.start()
def _loop(self):
print("start of loop 2")
for res in iter(self.q_in.get, None):
self._handle_msg(res)
print("end of loop 2")
def _loop_binary(self):
print("start of loop 3")
for res in iter(self.q_binary.get, None):
self._handle_binary(res)
print("end of loop 3")
def _handle_msg(self, msg):
msg_type = msg[0]
if msg_type == "stop2":
self.q_in.put(None)
self.q_binary.put(None)
def _put_msg(self, msg):
self.q_out.put(msg)
def stop(self):
print("STOP RECEIVED")
self.running = False
self.q_in.put(None)
self.q_binary.put(None)
def _handle_binary(self, data):
pass
def handle_element(self):
self._put_msg(["something"])
def run_twisted(q_in, q_out, q_binary):
s = SocketClientProtocol(q_in, q_out, q_binary)
signal.signal(signal.SIGINT, partial(signal_handler_child, s))
while s.running:
sleep(2)
s.handle_element()
class MediatorSender(object):
def __init__(self):
self.q_in = None
self.q_out = None
self.q_binary = None
self.p = None
self.running = False
def start(self):
if self.running:
return
self.running = True
self.q_in = Queue()
self.q_out = Queue()
self.q_binary = Queue()
print("!!!!START")
self.p = Process(target=run_twisted,
args=(self.q_in, self.q_out, self.q_binary))
self.p.start()
self.loop = Thread(target=self._loop)
self.loop.start()
def stop(self):
print("!!!!STOP")
if not self.running:
return
print("STOP2")
self.running = False
self.q_out.put(None)
def _loop(self):
print("start of loop 1")
for res in iter(self.q_out.get, None):
self._handle_msg(res)
print("end of loop 1")
def _handle_msg(self, msg):
self._put_msg(msg)
def _put_msg(self, msg):
self.q_in.put(msg)
def _put_binary(self, msg):
self.q_binary.put(msg)
def send_chunk(self, chunk):
self._put_binary(chunk)
def signal_handler_main(ms, *args):
ms.stop()
def signal_handler_child(s, *args):
s.stop()
if __name__ == "__main__":
ms = MediatorSender()
signal.signal(signal.SIGINT, partial(signal_handler_main, ms))
ms.start()
for i in range(100):
ms.send_chunk("some chunk of data")
while ms.loop.is_alive():
ms.loop.join(9999999)
print('done main')
Maybe you should try to capture the SIGINT signal, which is generated by Ctrl+C, using signal.signal like this:
#!/usr/bin/env python
import signal
import sys

def signal_handler(signal, frame):
    print('You pressed Ctrl+C!')
    sys.exit(0)

signal.signal(signal.SIGINT, signal_handler)
print('Press Ctrl+C')
signal.pause()
Code stolen from here
This usually works for me if I am using the threading module. It will not work if you use the multiprocessing one though. If you are running the script from the terminal try running it in the background, like this.
python scriptFoo.py &
After you run the process it will output the PID like this
[1] 23107
Whenever you need to quit the script you just type kill and the script PID like this.
kill 23107
Hit enter again and it should kill all the subprocesses and output this.
[1]+ Terminated python scriptFoo.py
As far as I know, you cannot kill all the subprocesses with Ctrl+C.
