How to pause a multiprocessing Pool during execution - python

While using:
def myFunction(arg):
    for i in range(10000):
        pass

from multiprocessing import Pool
pool = Pool(processes=3)
pool.map_async(myFunction, ['first', 'second', 'third'])
I want the user to be able to pause execution of the multiprocessing Pool at any time after it has started, and then to be able to unpause (continue) with the rest of the items in the Pool. How can I achieve this?
EDIT:
Here is the working implementation of suggestions posted by Blckknght. Thanks Blckknght!
import sys
import multiprocessing
from PyQt4 import QtGui, QtCore

def setup(event):
    global unpaused
    unpaused = event

def myFunction(arg=None):
    unpaused.wait()
    print "Task started...", arg
    for i in range(15000000):
        pass
    print '...task completed.', arg

class MyApp(object):
    def __init__(self):
        super(MyApp, self).__init__()
        app = QtGui.QApplication(sys.argv)
        self.mainWidget = QtGui.QWidget()
        self.mainLayout = QtGui.QVBoxLayout()
        self.mainWidget.setLayout(self.mainLayout)
        self.groupbox = QtGui.QGroupBox()
        self.layout = QtGui.QVBoxLayout()
        self.groupbox.setLayout(self.layout)
        self.pauseButton = QtGui.QPushButton('Pause')
        self.pauseButton.clicked.connect(self.pauseButtonClicked)
        self.okButton = QtGui.QPushButton('Start Pool')
        self.okButton.clicked.connect(self.startPool)
        self.layout.addWidget(self.okButton)
        self.layout.addWidget(self.pauseButton)
        self.mainLayout.addWidget(self.groupbox)
        self.mainWidget.show()
        sys.exit(app.exec_())

    def startPool(self):
        self.event = multiprocessing.Event()
        self.pool = multiprocessing.Pool(1, setup, (self.event,))
        self.result = self.pool.map_async(myFunction, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
        self.event.set()
        # self.result.wait()

    def pauseJob(self):
        self.event.clear()

    def continueJob(self):
        self.event.set()

    def pauseButtonClicked(self):
        if self.pauseButton.text() == 'Pause':
            print '\n\t\t ...pausing job...', '\n'
            self.pauseButton.setText('Resume')
            self.pauseJob()
        else:
            print '\n\t\t ...resuming job...', '\n'
            self.pauseButton.setText('Pause')
            self.continueJob()

if __name__ == '__main__':
    MyApp()

It sounds like you want to use a multiprocessing.Event to control the running of your worker function. You can create one, pass it to the pool's initializer, and then wait on it in myFunction.
Here's an example that runs workers that print their argument every second. The workers can be paused by clearing the event, and restarted by setting it again.
from time import sleep
import multiprocessing

def setup(event):
    global unpaused
    unpaused = event

def myFunction(arg):
    for i in range(10):
        unpaused.wait()
        print(arg)
        sleep(1)

if __name__ == "__main__":
    event = multiprocessing.Event()  # initially unset, so workers will be paused at first
    pool = multiprocessing.Pool(3, setup, (event,))
    result = pool.map_async(myFunction, ["foo", "bar", "baz"])
    event.set()    # unpause workers
    sleep(5)
    event.clear()  # pause after five seconds
    sleep(5)
    event.set()    # unpause again after five more seconds
    result.wait()  # wait for the rest of the work to be completed
The worker processes should print "foo", "bar" and "baz" ten times each, with a one-second delay between repetitions. The workers will be paused after the first five seconds, though, and restarted after five more seconds. There are probably various ways to improve this code, depending on what your actual use case is, but hopefully it is enough to get you headed in the right direction.
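Note that a pause only takes effect at the points where a worker actually calls unpaused.wait(). In the edited implementation above, the event is checked once at the start of each task, so pausing stops the pool between tasks rather than in the middle of one. Below is a minimal sketch of a worker that re-checks the event inside its work loop; the split into 15 chunks and the busy loop are made up purely for illustration:
def myFunction(arg=None):
    # Re-check the shared event periodically so a pause takes effect
    # mid-task instead of only between tasks.
    for chunk in range(15):            # hypothetical: split the work into chunks
        unpaused.wait()                # blocks here whenever the event is cleared
        for i in range(1000000):       # stand-in for one chunk of real work
            pass
    print '...task completed.', arg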

Related

In QThread: call a function again 1 second after the function finishes

I want to acquire a spectrum from a spectrometer every 1 s, and below is the code I wrote for this.
However, when the spectrometer is triggered at less than 1 Hz,
the function asking for a spectrum will wait for a trigger and take longer than 1 s to process, while the timer has already timed out.
This causes the thread to become very laggy and eventually freeze.
What is a better way to write it so that it calls the function 'acquire_spectrum' 1 s after the function finishes, instead of just calling it every 1 s?
class Spec_Thread(QThread):
    def __init__(self):
        QThread.__init__(self)
        self.signals = Signals()
        self.specth = Spectrometer.from_first_available()  # connect to the spectrometer
        self.threadtimer = QTimer()
        self.threadtimer.moveToThread(self)
        self.threadtimer.timeout.connect(self.acquire_spectrum)

    def acquire_spectrum(self):  # acquire the current spectrum from the spectrometer
        print('in thread,', device_running)
        if device_running == True:
            self.specth.open()                            # open the usb portal
            self.wavelengths = self.specth.wavelengths()  # acquire wavelengths (will wait for a trigger)
            self.intensities = self.specth.intensities()  # acquire intensities (will wait for a trigger)
            self.specth.close()                           # close usb portal
            self.signals.new_spectrum.emit(self.wavelengths, self.intensities)
        else:
            print('Device stopped')
            return

    def run(self):
        self.threadtimer.start(10000)
        loop = QEventLoop()
        loop.exec_()
You have to start a timer after the task execution finishes, and for this you can use threading.Timer():
import threading
from PyQt5 import QtCore
import numpy as np
from seabreeze.spectrometers import Spectrometer

class QSpectrometer(QtCore.QObject):
    dataChanged = QtCore.pyqtSignal(np.ndarray, np.ndarray)

    def __init__(self, parent=None):
        super().__init__(parent)
        self.specth = Spectrometer.from_first_available()

    def start(self, repeat=False):
        threading.Thread(target=self._read, args=(repeat,), daemon=True).start()

    def _read(self, repeat):
        self.specth.open()
        wavelengths = self.specth.wavelengths()
        intensities = self.specth.intensities()
        self.specth.close()
        self.dataChanged.emit(wavelengths, intensities)
        if repeat:
            threading.Timer(1, self._read, (repeat,)).start()

if __name__ == "__main__":
    import sys
    app = QtCore.QCoreApplication(sys.argv)
    spectometer = QSpectrometer()
    spectometer.start(True)
    spectometer.dataChanged.connect(print)
    sys.exit(app.exec_())

PyQt Progress Bar Update with Threads

I have been writing a program which runs a remote script on a server. So, I need to show the progress with a bar, but somehow when I run my code the GUI starts to freeze. I have used QThread and SIGNAL but unfortunately couldn't succeed.
Here is my code below:
class dumpThread(QThread):
    def __init__(self):
        QThread.__init__(self)

    def __del__(self):
        self.wait()

    def sendEstablismentCommands(self, connection):
        # Commands are sent sequentially with proper delay-timers #
        connection.sendShell("telnet localhost 21000")
        time.sleep(0.5)
        connection.sendShell("admin")
        time.sleep(0.5)
        connection.sendShell("admin")
        time.sleep(0.5)
        connection.sendShell("cd imdb")
        time.sleep(0.5)
        connection.sendShell("dump subscriber")
        command = input('$ ')

    def run(self):
        # your logic here
        # self.emit(QtCore.SIGNAL('THREAD_VALUE'), maxVal)
        self.sendEstablismentCommands(connection)

class progressThread(QThread):
    def __init__(self):
        QThread.__init__(self)

    def __del__(self):
        self.wait()

    def run(self):
        # your logic here
        while 1:
            maxVal = 100
            self.emit(SIGNAL('PROGRESS'), maxVal)

class Main(QtGui.QMainWindow):
    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.ui.connectButton.clicked.connect(self.connectToSESM)

    def connectToSESM(self):
        ## Function called when pressing the connect button; input is taken from the edit boxes. ##
        ## dumpThread() has been designed as a worker thread separate from the GUI. ##
        # Connection data are taken from "Edit Boxes"
        # username has been set as hardcoded
        ### Values Should Be Defined As Global ###
        username = "ntappadm"
        password = self.ui.passwordEdit.text()
        ipAddress = self.ui.ipEdit.text()
        # Connection is established through the paramiko shell library
        global connection
        connection = pr.ssh(ipAddress, username, password)
        connection.openShell()
        pyqtRemoveInputHook()  # For removing unnecessary items from the console
        global get_thread
        get_thread = dumpThread()  # Run thread - Dump Subscriber
        self.progress_thread = progressThread()
        self.progress_thread.start()
        self.connect(self.progress_thread, SIGNAL('PROGRESS'), self.updateProgressBar)
        get_thread.start()

    def updateProgressBar(self, maxVal):
        for i in range(maxVal):
            self.ui.progressBar.setValue(self.ui.progressBar.value() + 1)
            time.sleep(1)
            maxVal = maxVal - 1
            if maxVal == 0:
                self.ui.progressBar.setValue(100)

    def parseSubscriberList(self):
        parsing = reParser()

    def done(self):
        QtGui.QMessageBox.information(self, "Done!", "Done fetching posts!")

if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    main = Main()
    main.show()
    sys.exit(app.exec_())
I am expecting the updateProgressBar method to be called via the SIGNAL, so the work goes through a separate thread. I couldn't find what I am missing.
Thanks for any help
There are really two problems. One thing I have noticed is that Python threads are greedy if they are not used for IO operations like reading from a serial port. If you tell a thread to run a calculation or something that is not IO related, the thread will take up all of the processing and doesn't like to let the main thread / event loop run. The second problem is that signals are slow ... very slow. I've noticed that if you emit a signal from a thread and do it very fast, it can drastically slow down a program.
So at the heart of the issue, the thread is taking up all of the time and you are emitting signals very, very fast, which will cause slowdowns.
For clarity and ease of use I would use the new-style signals and slots.
http://pyqt.sourceforge.net/Docs/PyQt4/new_style_signals_slots.html
class progressThread(QThread):
    progress_update = QtCore.Signal(int)  # or pyqtSignal(int)

    def __init__(self):
        QThread.__init__(self)

    def __del__(self):
        self.wait()

    def run(self):
        # your logic here
        while 1:
            maxVal = 100
            self.progress_update.emit(maxVal)  # self.emit(SIGNAL('PROGRESS'), maxVal)
            # Tell the thread to sleep for 1 second and let other things run
            time.sleep(1)
To connect to the new style signal
...
self.progress_thread.start()
self.progress_thread.progress_update.connect(self.updateProgressBar)  # self.connect(self.progress_thread, SIGNAL('PROGRESS'), self.updateProgressBar)
...
EDIT
Sorry, there is another problem. When a signal calls a function, you cannot stay in that function forever. That function is not running in a separate thread; it is running on the main event loop, and the main event loop cannot run again until you exit that function.
updateProgressBar sleeps for 1 second and keeps looping. The hanging comes from staying in this function.
def updateProgressBar(self, maxVal):
    for i in range(maxVal):
        self.ui.progressBar.setValue(self.ui.progressBar.value() + 1)
        time.sleep(1)
        maxVal = maxVal - 1
        if maxVal == 0:
            self.ui.progressBar.setValue(100)
It would be better to write the progress bar update like this:
class progressThread(QThread):
    progress_update = QtCore.Signal(int)  # or pyqtSignal(int)

    def __init__(self):
        QThread.__init__(self)

    def __del__(self):
        self.wait()

    def run(self):
        # your logic here
        while 1:
            maxVal = 1  # NOTE THIS CHANGED to 1 since updateProgressBar was updating the value by 1 every time
            self.progress_update.emit(maxVal)  # self.emit(SIGNAL('PROGRESS'), maxVal)
            # Tell the thread to sleep for 1 second and let other things run
            time.sleep(1)

def updateProgressBar(self, maxVal):
    self.ui.progressBar.setValue(self.ui.progressBar.value() + maxVal)
    if maxVal == 0:
        self.ui.progressBar.setValue(100)

Why the threads are not released after all work is consumed from python Queue

I use a Queue to provide tasks that threads can work on. After all the work from the Queue is done, I see that the threads are still alive, while I expected them to be released. Here is my code. You can see from the console output that the active thread count keeps increasing after each batch of tasks (in the same queue). How can I release the threads after a batch of work gets done?
import threading
import time
from Queue import Queue

class ThreadWorker(threading.Thread):
    def __init__(self, task_queue):
        threading.Thread.__init__(self)
        self.task_queue = task_queue

    def run(self):
        while True:
            work = self.task_queue.get()
            # do some work
            # do_work(work)
            time.sleep(0.1)
            self.task_queue.task_done()

def get_batch_work_done(works):
    task_queue = Queue()
    for _ in range(5):
        t = ThreadWorker(task_queue)
        t.setDaemon(True)
        t.start()
    for work in range(works):
        task_queue.put(work)
    task_queue.join()
    print 'get batch work done'
    print 'active threads count is {}'.format(threading.activeCount())

if __name__ == '__main__':
    for work_number in range(3):
        print 'start with {}'.format(work_number)
        get_batch_work_done(work_number)
Do a read with a short timeout in a loop and use the resulting exception to terminate:
# Requires "from Queue import Queue, Empty" at the top of the module.
def run(self):
    try:
        while True:
            # Block for at most 0.1 s; Empty is raised if the queue stays empty.
            work = self.task_queue.get(True, 0.1)
            # do some work
            # do_work(work)
            self.task_queue.task_done()
    except Empty:
        print "goodbye"

Threaded upload with timeout in Python and PySide

I am looking for a design pattern based on threading.Thread, multiprocessing or Queue to upload a list of items with a timeout. The thread allows the GUI to remain responsive. If the connection hangs, the timeout should trigger and the program should exit gracefully.
The example below works, but the GUI remains blocked. How could this be improved to allow uploading of the list, manual canceling of the process, and a timeout of the upload process, while keeping the GUI non-blocking?
from PySide.QtGui import *
from PySide.QtCore import *
import sys
import time
import threading

class UploadWindow(QDialog):
    def __init__(self, parent=None):
        super(UploadWindow, self).__init__(parent)
        self.uploadBtn = QPushButton('Upload')
        mainLayout = QVBoxLayout()
        mainLayout.addWidget(self.uploadBtn)
        self.uploadBtn.clicked.connect(self.do_upload)
        self.progressDialog = QProgressDialog(self)
        self.progressDialog.canceled.connect(self.cancelDownload)
        self.progressDialog.hide()
        self.setLayout(mainLayout)
        self.show()
        self.raise_()

    def do_upload(self):
        self.uploadBtn.setEnabled(False)
        self.progressDialog.setMaximum(10)
        self.progressDialog.show()
        self.upload_thread = UploadThread(self)
        self.upload_thread.start()
        self.upload_thread_stopped = False
        # List of items to upload
        for i in range(10):
            self.upload_thread = UploadThread(i)
            self.upload_thread.start()
            self.upload_thread.join(5)
            self.progressDialog.setValue(i)
            if self.upload_thread_stopped:
                break
        self.progressDialog.hide()
        self.uploadBtn.setEnabled(True)

    def cancelDownload(self):
        self.upload_thread_stopped = True

class UploadThread(threading.Thread):
    def __init__(self, i):
        super(UploadThread, self).__init__()
        self.i = i
        self.setDaemon(True)

    def run(self):
        time.sleep(0.25)  # simulate upload time
        print self.i

if __name__ == '__main__':
    app = QApplication(sys.argv)
    w = UploadWindow()
    sys.exit(app.exec_())
The GUI is not responsive because you do all the work in do_upload, never going back to the main loop.
Also, you call Thread.join(), which blocks everything until the thread is done (see https://docs.python.org/2/library/threading.html#threading.Thread.join).
You should use PySide.QtCore.QThread to take advantage of signals and slots.
Here is a nice example in C++. I implemented it in Python 3.4 with PyQt here, but you should be able to use it with PySide too.
You might also want to look at PySide.QtCore.QProcess, to avoid using threads.
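As a rough illustration of that pattern, here is a minimal sketch only: the class name, signal names and the 0.25 s sleep standing in for the real upload are all made up, and cancellation is cooperative between items rather than a hard timeout:
from PySide import QtCore
import time

class UploadWorker(QtCore.QThread):        # hypothetical name
    progress = QtCore.Signal(int)          # emitted after each uploaded item
    finished_all = QtCore.Signal()         # named to avoid QThread's built-in finished signal

    def __init__(self, items, parent=None):
        super(UploadWorker, self).__init__(parent)
        self.items = items
        self._cancel = False

    def cancel(self):
        self._cancel = True                # checked between items

    def run(self):
        for n, item in enumerate(self.items, 1):
            if self._cancel:
                break
            time.sleep(0.25)               # stand-in for the real upload of one item
            self.progress.emit(n)
        self.finished_all.emit()
In the dialog you would then, instead of looping in do_upload, do something like:
self.worker = UploadWorker(items)
self.worker.progress.connect(self.progressDialog.setValue)
self.worker.finished_all.connect(self.progressDialog.hide)
self.progressDialog.canceled.connect(self.worker.cancel)
self.worker.start()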
Here, I put some code together that does what I think you want.
For a real project, be sure to keep better track of what has been uploaded and/or use something safer than .terminate() to stop the thread on demand.
import sys
from PySide import QtGui, QtCore
import time

class MySigObj(QtCore.QObject):
    strSig = QtCore.Signal(str)
    tupSig = QtCore.Signal(tuple)

class UploadThread(QtCore.QThread):
    def __init__(self, parent=None):
        super(UploadThread, self).__init__(parent)
        self.endNow = False
        self.fileName = None
        self.sig = MySigObj()
        self.fileNames = []
        self.uploaded = []

    #QtCore.Slot(str)
    def setFileNames(self, t):
        self.fileNames = list(t)

    def run(self):
        while self.fileNames:
            print(self.fileNames)
            time.sleep(2)
            name = self.fileNames.pop(0)
            s = 'uploaded file: ' + name + '\n'
            print(s)
            self.sig.strSig.emit(s)
            self.uploaded.append(name)
            if len(self.fileNames) == 0:
                self.sig.strSig.emit("files transmitted: %s" % str(self.uploaded))
        else:
            time.sleep(1)  # if the thread started but no list, wait 1 sec every cycle thru
                           # that way this thread should release the Python GIL (Global Interpreter Lock)

class ULoadWin(QtGui.QWidget):
    def __init__(self, parent=None):
        super(ULoadWin, self).__init__(parent)
        self.upThread = UploadThread()
        self.sig = MySigObj()
        self.sig.tupSig.connect(self.upThread.setFileNames)
        self.upThread.sig.strSig.connect(self.txtMsgAppend)
        self.sig.tupSig.connect(self.upThread.setFileNames)
        self.layout = QtGui.QVBoxLayout()
        self.stButton = QtGui.QPushButton("Start")
        self.stButton.clicked.connect(self.uploadItems)
        self.stpButton = QtGui.QPushButton("Stop")
        self.stpButton.clicked.connect(self.killThread)
        self.testButton = QtGui.QPushButton("write txt\n not(?) blocked \nbelow")
        self.testButton.setMinimumHeight(28)
        self.testButton.clicked.connect(self.tstBlking)
        self.lbl = QtGui.QTextEdit()
        self.lbl.setMinimumHeight(325)
        self.lbl.setMinimumWidth(290)
        self.layout.addWidget(self.stButton)
        self.layout.addWidget(self.stpButton)
        self.layout.addWidget(self.testButton)
        self.layout.addWidget(self.lbl)
        self.setLayout(self.layout)
        self.l = ['a', 'list', 'of_files', 'we', 'will_pretend_to_upload', 'st', 'uploading']
        self.upThread.start()

    def tstBlking(self):
        self.lbl.append("txt not(?) blocked")

    def uploadItems(self):
        t = tuple(self.l)
        self.sig.tupSig.emit(t)
        self.upThread.start()

    def killThread(self):
        self.upThread.terminate()
        time.sleep(.01)
        self.upThread = UploadThread()

    #QtCore.Slot(str)
    def txtMsgAppend(self, txt):
        self.lbl.append(txt + " | ")

if __name__ == '__main__':
    app = QtGui.QApplication(sys.argv)
    widg = ULoadWin()
    widg.show()
    sys.exit(app.exec_())
In the end I solved this problem by adapting the approach outlined here, which I find to be an elegant solution. The class I created is shown below:
class UploadThread(threading.Thread):
    # input_q and result_q are Queue.Queue objects
    def __init__(self, input_q, result_q):
        super(UploadThread, self).__init__()
        self.input_q = input_q
        self.result_q = result_q
        self.stoprequest = threading.Event()  # threadsafe flag

    def run(self):
        '''Runs indefinitely until self.join() is called.
        As soon as items are placed in the input_q, the thread will process them until the input_q is emptied.
        '''
        while not self.stoprequest.isSet():  # stoprequest can be set from the main gui
            try:
                # Queue.get with timeout to allow checking self.stoprequest
                num = self.input_q.get(True, 0.1)  # when the queue is empty it waits 100 ms before raising Queue.Empty
                print 'In thread, processing', num
                time.sleep(0.5)
                self.result_q.put(True)  # indicate to the main thread that an item was successfully processed
            except Queue.Empty as e:
                continue

    def join(self, timeout=None):
        self.stoprequest.set()
        super(UploadThread, self).join(timeout)
In the main thread, the upload thread is created and the input_q is loaded with the items to upload. A QTimer is created to regularly check on the progress of the upload by checking what has been placed in the result_q; it also updates the progress bar. If no progress has been made within a timeout, this indicates that the upload connection has failed.
An advantage of using Queue.Queue objects for communicating between threads is that multiple threads can share the same input and result queues.
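A rough sketch of that main-thread side, written as methods of the question's UploadWindow (illustrative only: the names start_upload, check_progress, poll_timer and the 5-second stall timeout are made up, and it assumes "import Queue" plus the question's existing PySide and time imports, with the UploadThread class above):
# Intended as methods of UploadWindow, used alongside the UploadThread class above.
def start_upload(self, items):
    self.input_q, self.result_q = Queue.Queue(), Queue.Queue()
    self.upload_thread = UploadThread(self.input_q, self.result_q)
    self.upload_thread.start()
    for item in items:
        self.input_q.put(item)
    self.done_count, self.total = 0, len(items)
    self.progressDialog.setMaximum(self.total)
    self.progressDialog.show()
    self.last_progress_time = time.time()
    self.poll_timer = QTimer(self)
    self.poll_timer.timeout.connect(self.check_progress)
    self.poll_timer.start(250)                 # poll the result queue every 250 ms

def check_progress(self):
    while True:                                # drain everything reported since the last tick
        try:
            self.result_q.get_nowait()
        except Queue.Empty:
            break
        self.done_count += 1
        self.last_progress_time = time.time()
        self.progressDialog.setValue(self.done_count)
    stalled = time.time() - self.last_progress_time > 5   # e.g. 5 s with no progress = stalled
    if self.done_count == self.total or stalled:
        self.poll_timer.stop()
        self.upload_thread.join()              # sets stoprequest and waits for the worker
        self.progressDialog.hide()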

Repeating function at timed interval with multithreads

I want to repeat a function at timed intervals. The issue I have is that the function runs another function in a separate process, and therefore it doesn't seem to work with my code.
From the example below, I want to repeat function1 every 60 seconds:
from multiprocessing import Process
from threading import Event

def function2(type):
    print("Function2")

def function1():
    print("Function1")
    if __name__ == '__main__':
        p = Process(target=function2, args=('type',))
        p.daemon = True
        p.start()
        p.join()

function1()
To repeat the function I attempted to use the following code:
class TimedThread(Thread):
    def __init__(self, event, wait_time, tasks):
        Thread.__init__(self)
        self.stopped = event
        self.wait_time = wait_time
        self.tasks = tasks

    def run(self):
        while not self.stopped.wait(0.5):
            self.tasks()

stopFlag = Event()
thread = TimedThread(stopFlag, 60, function1)
thread.start()
Both snippets combined print "Function1" in a timed loop but also produce the following error:
AttributeError: Can't get attribute 'function2' on <module '__main__' (built-in)>
Any help would be greatly appreciated.
You can wrap your function1, like:
def main():
    while True:
        time.sleep(60)
        function1()
or you can have it run in a separate thread:
def main():
    while True:
        time.sleep(60)
        t = threading.Thread(target=function1)
        t.start()
It actually works for me, printing Function1 and Function2 over and over. Are these two snippets in the same file?
If you import function1 from a different module, then the if __name__ == '__main__' check will fail.
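For reference, a single-file arrangement along the lines of that suggestion (a sketch only): function2 is kept at module level so a spawned child process can find it when it re-imports the module, the process creation lives inside function1 with no guard, and only the thread start-up sits under the __main__ guard. It also uses the wait_time argument instead of the hard-coded 0.5 so the 60-second interval is honored:
from multiprocessing import Process
from threading import Event, Thread

def function2(type):
    print("Function2")

def function1():
    print("Function1")
    p = Process(target=function2, args=('type',))
    p.daemon = True
    p.start()
    p.join()

class TimedThread(Thread):
    def __init__(self, event, wait_time, tasks):
        Thread.__init__(self)
        self.stopped = event
        self.wait_time = wait_time
        self.tasks = tasks

    def run(self):
        # wait() returns False on timeout and True once the event is set
        while not self.stopped.wait(self.wait_time):
            self.tasks()

if __name__ == '__main__':
    stopFlag = Event()
    thread = TimedThread(stopFlag, 60, function1)
    thread.start()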
I managed to find an alternative, working solution. Instead of using processes, I achieved the desired results using threads. The differences between the two are well explained here.
from threading import Event, Thread

class TimedThread(Thread):
    def __init__(self, event, wait_time):
        Thread.__init__(self)
        self.stopped = event
        self.wait_time = wait_time

    def run(self):
        while not self.stopped.wait(self.wait_time):
            self.function1()

    def function2(self):
        print("Function2 started from thread")
        # Do something

    def function1(self):
        print("Function1 started from thread")
        # Do something
        temp_thread = Thread(target=self.function2)
        temp_thread.start()

stopFlag = Event()
thread = TimedThread(stopFlag, 60)
thread.start()
