System call in Python

I want to make a system call in my Python code, but the problem is that it breaks the sequence of my original code.
For example:
def call_host(self):
    self.builder.get_object("windowMain").show()
    os.system('python Adder.py')
    self.builder.get_object("window1").show()
In the above PyGTK code, once Adder.py is called, the next line won't execute. I want the system to execute Adder.py and come back to my original code.
Thanks in advance.
Here is my code for Adder.py
import sys
try:
    import pygtk
    pygtk.require("2.0")
except:
    pass
try:
    import gtk
except:
    print("GTK Not Availible")
    sys.exit(1)

class adder:
    result = 0
    def __init__(self, number1, number2):
        return None
    def giveResult(self, number1, number2):
        self.result = int(number1) + int(number2)
        return str(self.result)

class adderGui:
    def __init__(self):
        self.builder = gtk.Builder()
        self.builder.add_from_file("Adder.glade")
        dic = {
            "on_buttonQuit_clicked": self.quit,
            "on_buttonAdd_clicked": self.add,
            "on_windowMain_destroy": self.quit,
        }
        self.builder.connect_signals(dic)
    def add(self, widget):
        entry1 = self.builder.get_object("entry1")
        entry2 = self.builder.get_object("entry2")
        try:
            thistime = adder(entry1.get_text(), entry2.get_text())
        except ValueError:
            self.builder.get_object("hboxWarning").show()
            self.builder.get_object("entryResult").set_text("ERROR")
            return 0
        self.builder.get_object("hboxWarning").show()
        #self.builder.get_object("image1").hide()
        self.builder.get_object("entryResult").set_text(
            thistime.giveResult(entry1.get_text(), entry2.get_text())
        )
    def quit(self, widget):
        sys.exit(0)

adderGui = adderGui()
gtk.main()

If you use subprocess.Popen, your main program will keep running instead of blocking until the subprocess terminates.

os.system will run a command in a subprocess and wait for it to finish before continuing. If you want the command to run in parallel with the parent process, then you should look at the subprocess module -- in particular, you'll want to create a subprocess.Popen object.
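For illustration, here is a minimal sketch of the non-blocking version (the Adder.py path comes from the question; the rest is just an assumption about how you might wire it up):

import subprocess

# os.system('python Adder.py') would block here until Adder.py exits.
# Popen starts Adder.py and returns immediately, so the lines after it
# keep running while Adder.py is still open.
child = subprocess.Popen(['python', 'Adder.py'])

# ... carry on showing windows, running the GTK main loop, etc. ...

# Later, if you ever need to know whether Adder.py has finished:
# child.poll()  -> None while it is still running, otherwise its exit code
# child.wait()  -> blocks until it exits (only call this when you do want to wait)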

Related

Tkinter - How to keep the main loop from waiting on a validate command?

I have a username entry field that validates its contents by checking whether the username exists within the domain. The only problem is that this call takes about 1 ~ 2 seconds to return. Is there a way to use threading to keep the mainloop from waiting on the validate command?
Example code:
import tkinter as tk
import subprocess
from typing import NoReturn

class GUI(tk.Tk):
    def __init__(self) -> None:
        super().__init__()
        self.geometry('300x300')
        self.title('test app')
        self.__CreateWidgets()
    def Start(self) -> None:
        self.mainloop()
    def __CreateWidgets(self) -> None:
        self.__errorlabel = tk.Label(
            self,
            text = '',
            foreground = 'red'
        )
        self.__errorlabel.grid(column = 0, row = 0)
        self.__userIDentry = tk.Entry(
            self,
            validate = 'focusout',
            validatecommand = (self.register(self.__UserIDValidator), '%P'), # This is what I want to change.
            invalidcommand = lambda: self.__errorlabel.config(
                text = 'Invalid user ID'
            )
        )
        self.__userIDentry.grid(column = 0, row = 1)
        self.__userIDentry.bind('<FocusIn>', lambda _: self.__errorlabel.config(
            text = ''
            )
        )
        tk.Entry( # This is just a blank entry to click on to test the previous entry validation.
            self,
        ).grid(column = 0, row = 2)
    def __UserIDValidator(self, string: str) -> bool:
        # This function takes about 1 ~ 2 seconds to return:
        flag = False
        if string:
            p = subprocess.Popen(
                f'net user /domain {string}',
                stdout = subprocess.PIPE,
                stderr = subprocess.PIPE
            )
            error = p.stderr.read()
            if not error:
                flag = True
        return flag

def main() -> NoReturn:
    app = GUI()
    app.Start()

if __name__ == '__main__':
    main()
Running App
You should be able to copy and run the code above if you're on a domain. It basically sends a net user /domain command and checks whether the request returned an error. The program freezes until the validation is complete, so is there a way to run the validation and keep the mainloop from getting stuck?
I had a similar problem; I fixed it with threading:
import threading

def my_function():
    something_somewhat_intensive()

def my_function_thr():
    thr = threading.Thread(target=my_function)
    thr.start()
Then you call my_function_thr() to start that work on a thread. I don't know how this interacts with the subprocess import at all, but it fixed a lot of problems that I had.
It should also be mentioned that if your task is resource-intensive, and a bit more than, say, saving a file, use multiprocessing. For a lot of cases, especially when you just want the Tkinter window not to freeze, I do recommend threading.
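To make that concrete, here is a rough, stripped-down sketch of the same idea applied to the validator from the question (not the full class; the widget names are placeholders). The validatecommand returns True straight away, the slow net user check runs on a worker thread, and the result is handed back with after(), because Tkinter widgets should only be touched from the main thread:

import subprocess
import threading
import tkinter as tk

root = tk.Tk()
error_label = tk.Label(root, foreground='red')
error_label.grid(column=0, row=0)

def check_user(username):
    # Worker thread: the slow 'net user /domain' lookup from the question runs here.
    p = subprocess.Popen(f'net user /domain {username}',
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    _, error = p.communicate()
    ok = not error
    # Hand the result back to the main thread (or push it onto a queue that the
    # main thread polls with after(), if you prefer to be strict about thread safety).
    root.after(0, lambda: error_label.config(text='' if ok else 'Invalid user ID'))

def validate(proposed):
    # validatecommand: start the check in the background and return True
    # immediately so the mainloop never blocks.
    if proposed:
        threading.Thread(target=check_user, args=(proposed,), daemon=True).start()
    return True

entry = tk.Entry(root, validate='focusout',
                 validatecommand=(root.register(validate), '%P'))
entry.grid(column=0, row=1)
tk.Entry(root).grid(column=0, row=2)  # blank entry to click on, as in the question
root.mainloop()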

Design pattern for non-blocking nested function calls in main thread of GUI?

I need some help with identifying a design pattern or a technique to make a PyQt GUI application non-blocking, when it has multiple nested calls which invoke one external subprocess after another. The pseudocode is:
class MainWindow:
    def executeInSubprocess(self, theCommand):
        subprocess.run(theCommand, check=True)
        # program control reaches here only after the command executed by subprocess.run completes execution
    def function1(self):
        commandToExecute = ['python', '-m', 'someProgram.py', '--CLA_option']
        self.executeInSubprocess(commandToExecute)
        self.function2()
        # some commands to update the GUI widgets
    def function2(self):
        # Some lines here gather data generated by running function1
        self.function3(data)
    def function3(self, data):
        commandToExecute = ['python', '-m', 'anotherProgram.py', '--CLA_option']
        for oneData in data:
            newCommand = commandToExecute[:]  # copy contents of list
            newCommand.append('--someOption')
            newCommand.append(oneData)
            self.executeInSubprocess(newCommand)
When the user clicks a button on the GUI, function1 gets invoked. The button remains disabled until the "#some commands to update the GUI widgets" line, where the button is re-enabled.
The problem:
How to design it in such a way that executeInSubprocess is non-blocking? I'd like to try replacing subprocess.run with QProcess, which would not wait until the external command completes, thereby not freezing the GUI.
executeInSubprocess is called inside a for loop multiple times in function3, and is dependent on the data generated by function1.
These functions are not too large, so refactoring them shouldn't be an issue. What I need is knowledge of a good way to design such a scenario. I've considered putting references to function1, function2 and function3 in a list and having a function in a class call them one by one, but then the executeInSubprocess in function3 complicates things.
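To make the QProcess idea above concrete, here is a rough, untested sketch of what a non-blocking executeInSubprocess might look like; the onFinished callback is only an illustration of how the chain could continue:

from PyQt5.QtCore import QProcess

class MainWindow:
    def executeInSubprocess(self, theCommand, onFinished):
        # QProcess.start() returns immediately, so the GUI stays responsive;
        # onFinished runs later, once the external command has exited.
        self.externalProcess = QProcess()  # keep a reference so it isn't garbage collected
        self.externalProcess.finished.connect(lambda exitCode, exitStatus: onFinished())
        self.externalProcess.start(theCommand[0], theCommand[1:])

    def function1(self):
        commandToExecute = ['python', '-m', 'someProgram.py', '--CLA_option']
        self.executeInSubprocess(commandToExecute, onFinished=self.function2)  # chain continues only after the command exits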
Update: I've provided an example of how complex and intertwined the code can get.
#!/usr/bin/python
# -*- coding: ascii -*-
from PyQt5.QtWidgets import (QApplication, QMainWindow, QPushButton, QPlainTextEdit, QVBoxLayout, QWidget)
from PyQt5.QtCore import QProcess
from collections import deque
import sys

# Note: The objective was to design a way to execute a function and return control back to the main thread so that the GUI does not freeze.
# But it had to be done in a way that a function could invoke another function, which could invoke another function. Each of these functions
# can have one or more external commands that they need to run. So after invoking each external process, control has to return to the main thread
# and then after running the external process completes, the next command or function needs to run and return control to the main thread and
# so on until all functions and external commands are exhausted. What's given below is a very crude, tangled design that needs to be improved.

class ExternalProcessInvoker:
    def __init__(self):
        self.externalProcess = None
        self.currentFunction = None
        self.functionQueue = deque()  # a sequence of functions to be executed
        self.commandQueue = dict()  # {the key is a function reference: the value is a list of commands [['python', 'prog1.py'], ['python', 'prog2.py'], ...]}
        self.FIRST_ELEMENT_OF_LIST = 0
        self.UTF8 = 'utf8'
    def registerTheMessageFunction(self, functionReference):
        self.message = functionReference
    def startTheProcess(self):
        while self.functionQueue:
            #print('Elements in queue are: ', self.functionQueue)
            theFunctionToExecute = self.functionQueue.popleft()
            #---run the function
            theFunctionToExecute(theFunctionToExecute)  # The function may populate commandQueue when run
            self.currentFunction = theFunctionToExecute
            self.checkForMoreFunctionPopulatedCommandsToExecute()
    def checkForMoreFunctionPopulatedCommandsToExecute(self):
        #---check if commandQueue is populated with any external process that needs to be run
        if self.currentFunction in self.commandQueue:
            if self.commandQueue[self.currentFunction]:  # contains some command
                command = self.commandQueue[self.currentFunction].popleft()
                #command = self.convertToUTF8(command)
                if self.externalProcess is None:  # No process running.
                    #---run the command
                    self.message('Executing ' + str(command)); print('Executing ', command)
                    self.externalProcess = QProcess()
                    self.externalProcess.readyReadStandardOutput.connect(self.functionForStdout)
                    self.externalProcess.readyReadStandardError.connect(self.functionForStderr)
                    self.externalProcess.stateChanged.connect(self.functionForStateChange)
                    self.externalProcess.finished.connect(self.processCompleted)  # Clean up once complete.
                    self.externalProcess.start(command[self.FIRST_ELEMENT_OF_LIST], command[self.FIRST_ELEMENT_OF_LIST + 1:])  # the first argument is the first command element (usually 'python'). The second argument is a list of all the remaining elements of the command
                    print('Process ID: ', self.externalProcess.processId())
            else:
                self.currentFunction = None
    def convertToUTF8(self, command):
        return [oneString.encode('utf-8') for oneString in command]
    def stopTheProcess(self):
        self.message('Attempting to stop the processes')
        if self.externalProcess:
            self.externalProcess.kill()
        self.message('Also cleared ' + str(len(self.functionQueue)) + ' items from execution queue')
        self.functionQueue.clear()
    def functionForStderr(self):
        data = self.externalProcess.readAllStandardError()
        stderr = bytes(data).decode(self.UTF8)
        self.message(stderr)
    def functionForStdout(self):
        data = self.externalProcess.readAllStandardOutput()
        stdout = bytes(data).decode(self.UTF8)
        self.message(stdout)
    def functionForStateChange(self, state):
        states = {QProcess.NotRunning: 'Not running', QProcess.Starting: 'Starting', QProcess.Running: 'Running'}
        state_name = states[state]
        self.message(f'State changed: {state_name}')
    def processCompleted(self):
        self.message('Process finished.')
        self.externalProcess = None
        self.checkForMoreFunctionPopulatedCommandsToExecute()
    def addThisFunctionToProcessingQueue(self, functionReference):
        #print('Received request to add this function to processing queue: ', functionReference)
        #print("What's already in processing queue: ", self.functionQueue)
        self.functionQueue.append(functionReference)
    def addThisCommandToProcessingList(self, command, functionReference):
        if functionReference not in self.commandQueue:
            self.commandQueue[functionReference] = deque()
        self.commandQueue[functionReference].append(command)
        #print('\nfunc ref: ', functionReference, ' command: ', command)
        self.message('Added ' + str(command) + ' for processing')
        #print(self.commandQueue)
class MainWindow(QMainWindow):
    def __init__(self):
        super().__init__()
        self.proc = ExternalProcessInvoker()
        self.proc.registerTheMessageFunction(self.message)
        self.executeButton = QPushButton('Execute')  # TODO: Disable button until processing completes or processes are killed
        self.executeButton.pressed.connect(self.initiateProcessing)
        self.killButton = QPushButton('Kill process')  # TODO: Disable button until processing is started
        self.killButton.pressed.connect(self.killProcessing)
        self.textDisplay = QPlainTextEdit()
        self.textDisplay.setReadOnly(True)
        boxLayout = QVBoxLayout()
        boxLayout.addWidget(self.executeButton)
        boxLayout.addWidget(self.killButton)
        boxLayout.addWidget(self.textDisplay)
        theWidget = QWidget()
        theWidget.setLayout(boxLayout)
        self.setCentralWidget(theWidget)
    def message(self, mssg):
        self.textDisplay.appendPlainText(mssg)
    def initiateProcessing(self):
        print('Processing initiated')
        #---first populate the functions and commands to execute
        self.proc.addThisFunctionToProcessingQueue(self.function1)
        self.proc.addThisFunctionToProcessingQueue(self.function2)
        self.proc.addThisFunctionToProcessingQueue(self.function3)
        self.proc.startTheProcess()
    def function1(self, functionReference):
        self.message('Executing function1')
        command = ['python', 'dummyScript1.py']
        self.proc.addThisCommandToProcessingList(command, functionReference)
    def function2(self, functionReference):
        self.message('Executing function2')
    def function3(self, functionReference):
        self.message('Executing function3')
        maxInvocations = 5
        secondsToSleep = 1
        for _ in range(maxInvocations):
            command = ['python', 'dummyScript2.py', str(secondsToSleep)]
            self.proc.addThisCommandToProcessingList(command, functionReference)
            secondsToSleep = secondsToSleep + 1
        self.message('Processing complete')
        print('Completed')
    def killProcessing(self):
        self.proc.stopTheProcess()

app = QApplication(sys.argv)
w = MainWindow()
w.show()
app.exec_()
Dummy script 1:
#!/usr/bin/python
# -*- coding: ascii -*-
import time
import datetime
sec = 3
print(datetime.datetime.now(), " Going to sleep for ", sec, " seconds")
time.sleep(sec)
print(datetime.datetime.now(), " Done sleeping dummy script1")
Dummy script 2:
#!/usr/bin/python
# -*- coding: ascii -*-
import sys
import time

def gotosleep(sleepTime):
    print("Going to sleep for ", sleepTime, " seconds")
    time.sleep(sleepTime)
    print("Done sleeping for ", sleepTime, " seconds in dummy script2")

if __name__ == "__main__":
    FIRST_EXTRA_ARG = 1
    sleepTime = int(sys.argv[FIRST_EXTRA_ARG])
    gotosleep(sleepTime)

Run infinite while loop in Python module

I'm writing a Python module to read jstest output and make an Xbox gamepad work in Python on Linux. In __init__ I need to start, in the background on another thread, an infinite while loop that looks like this:
import os
from threading import Thread
import time
import select
import subprocess

class Joystick:
    """Initializes base class and launches jstest and xboxdrv"""
    def __init__(self, refreshRate=2000, deadzone=4000):
        self.proc = subprocess.Popen(['xboxdrv', '-D', '-v', '--detach-kernel-driver', '--dpad-as-button'], stdout=subprocess.PIPE, bufsize=0)
        self.pipe = self.proc.stdout
        self.refresh = refreshRate
        self.refreshDelay = 1.0 / refreshRate
        self.refreshTime = 0  # indicates the next refresh
        self.deadzone = deadzone
        self.start()
        self.xbox = subprocess.Popen(['jstest', '--normal', '/dev/input/js0'], stdout=subprocess.PIPE, bufsize=-1, universal_newlines=True)
        self.response = self.xbox.stdout.readline()
        a = Thread(target=self.reload2())
        a.start()
        print("working")
    def reload2(self):
        while True:
            self.response = self.xbox.stdout.readline()
            print("read")
            time.sleep(0.5)
    def start(self):
        global leftVibrateAmount, rightVibrateAmount
        leftVibrateAmount = 0
        rightVibrateAmount = 0
        readTime = time.time() + 1  # here we wait a while
        found = False
        while readTime > time.time() and not found:
            readable, writeable, exception = select.select([self.pipe], [], [], 0)
            if readable:
                response = self.pipe.readline()
                # tries to detect if controller is connected
                if response == b'[ERROR] XboxdrvDaemon::run(): fatal exception: DBusSubsystem::request_name(): failed to become primary owner of dbus name\n':
                    raise IOError("Another instance of xboxdrv is running.")
                elif response == b'[INFO] XboxdrvDaemon::connect(): connecting slot to thread\n':
                    found = True
                    self.reading = response
                elif response == b'':
                    raise IOError('Are you running as sudo?')
        if not found:
            self.pipe.close()
            # halt if controller not found
            raise IOError("Xbox controller/receiver isn't connected")
The loop is set to start running in the __init__ function like so:
a = threading.Thread(target=self.reload2) # code hangs here
a.start()
But each time I create the variable "a", the whole program hangs in the while loop, which should be running in another thread.
Thanks for the help.
You may be having issues with your __init__. I put it in a simple class as an example, and it runs as expected.
import time
from threading import Thread

class InfiniteLooper():
    def __init__(self):
        a = Thread(target=self.reload2)  # reload2, not reload2(): otherwise you execute reload2 here and assign its result to target, but it's an infinite loop, so it never returns
        print('Added thread')
        a.start()
        print('Thread started')
    def reload2(self):
        while True:
            self.response = input('Enter something')
            print('read')
            time.sleep(0.5)

loop = InfiniteLooper()
Output:
Added thread
Thread started
Enter something
1
read
Enter something
1
read
As you can see, the "Enter something" appears after I've added the thread and started it. It also loops fine.
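Applied to the code in the question, the fix is simply to pass the method object instead of calling it (making it a daemon thread is my own addition, so it dies with the program):

# was: a = Thread(target=self.reload2())  -- this calls reload2 right away and never returns
a = Thread(target=self.reload2, daemon=True)  # pass the method itself; the thread will call it
a.start()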

Timeout function in Python

I want to have a function, in Python (3.x), which forces the script itself to terminate, like:
i_time_value = 10
mytimeout(i_time_value)  # Terminate the script if it has not finished in i_time_value seconds
for i in range(10):
    print("go")
    time.sleep(2)
Where "mytimeout" is the function I need: it terminates the script after "arg" seconds if the script has not already finished.
I have seen good solutions for putting a timeout on a function here or here, but I don't want a timeout for a function, I want one for the whole script.
Also:
I know that I can put my script in a function or use something like subprocess with a timeout; I tried it and it works, but I want something simpler.
It must be Unix & Windows compatible.
The function must be universal, i.e. it may be added to any script in one line (except the import).
I need a function, not a 'how to put a timeout in a script'.
signal is not Windows compatible.
You can send some signals on Windows e.g.:
os.kill(os.getpid(), signal.CTRL_C_EVENT) # send Ctrl+C to itself
You could use threading.Timer to call a function at a later time:
from threading import Timer
def kill_yourself(delay):
t = Timer(delay, kill_yourself_now)
t.daemon = True # no need to kill yourself if we're already dead
t.start()
where kill_yourself_now():
import os
import signal
import sys

def kill_yourself_now():
    sig = signal.CTRL_C_EVENT if sys.platform == 'win32' else signal.SIGINT
    os.kill(os.getpid(), sig)  # raise KeyboardInterrupt in the main thread
If your script starts other processes, then see: how to kill child process(es) when parent dies? See also How to terminate a python subprocess launched with shell=True -- it demonstrates how to kill a process tree.
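Put together as the one-liner the question asks for, usage should look roughly like this (kill_yourself plays the role of mytimeout):

import time

i_time_value = 10
kill_yourself(i_time_value)  # schedule a KeyboardInterrupt for 10 seconds from now
for i in range(10):
    print("go")
    time.sleep(2)  # the loop is cut short when the timer fires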
I would use something like this.
import sys
import time
import threading

def set_timeout(event):
    event.set()

event = threading.Event()
i_time_value = 2
t = threading.Timer(i_time_value, set_timeout, [event])
t.start()
for i in range(10):
    print("go")
    if event.is_set():
        print('Timed Out!')
        sys.exit()
    time.sleep(2)
A little bit of googling turned this answer up:
import multiprocessing as MP
from sys import exc_info
from time import clock

DEFAULT_TIMEOUT = 60

################################################################################

def timeout(limit=None):
    if limit is None:
        limit = DEFAULT_TIMEOUT
    if limit <= 0:
        raise ValueError()
    def wrapper(function):
        return _Timeout(function, limit)
    return wrapper

class TimeoutError(Exception): pass

################################################################################

def _target(queue, function, *args, **kwargs):
    try:
        queue.put((True, function(*args, **kwargs)))
    except:
        queue.put((False, exc_info()[1]))

class _Timeout:
    def __init__(self, function, limit):
        self.__limit = limit
        self.__function = function
        self.__timeout = clock()
        self.__process = MP.Process()
        self.__queue = MP.Queue()
    def __call__(self, *args, **kwargs):
        self.cancel()
        self.__queue = MP.Queue(1)
        args = (self.__queue, self.__function) + args
        self.__process = MP.Process(target=_target, args=args, kwargs=kwargs)
        self.__process.daemon = True
        self.__process.start()
        self.__timeout = self.__limit + clock()
    def cancel(self):
        if self.__process.is_alive():
            self.__process.terminate()
    @property
    def ready(self):
        if self.__queue.full():
            return True
        elif not self.__queue.empty():
            return True
        elif self.__timeout < clock():
            self.cancel()
        else:
            return False
    @property
    def value(self):
        if self.ready is True:
            flag, load = self.__queue.get()
            if flag:
                return load
            raise load
        raise TimeoutError()
    def __get_limit(self):
        return self.__limit
    def __set_limit(self, value):
        if value <= 0:
            raise ValueError()
        self.__limit = value
    limit = property(__get_limit, __set_limit)
It might be Python 2.x, but it shouldn't be terribly hard to convert.
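For what it's worth, usage would look roughly like this (untested; it relies on multiprocessing forking the call, and time.clock was removed in Python 3.8, so on modern Python you'd swap in time.monotonic):

@timeout(5)
def slow_add(a, b):
    import time
    time.sleep(2)
    return a + b

slow_add(1, 2)                  # starts a worker process and returns immediately
while slow_add.ready is False:  # False while still running; True once the result arrives
    pass                        # busy-wait only for the example; do real work here instead
print(slow_add.value)           # the result, or raises TimeoutError if the limit expired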

Background Process Locking up GUI Python

I have a background Process (using Process from multiprocessing) that is pushing objects to my GUI; however, this background process keeps locking up the GUI and the changes being pushed are never displayed. The objects are being put into my queue, but the update method in my GUI isn't being called regularly. What can I do to make the GUI update more regularly? My GUI is written in Tkinter.
My background process has an infinite loop within it because I always need to keep reading the USB port for more data, so basically my code looks like this:
TracerAccess.py
import usb
from types import *
import sys
from multiprocessing import Process, Queue
import time

__idVendor__ = 0xFFFF
__idProduct__ = 0xFFFF
END_POINT = 0x82

def __printHEXList__(list):
    print ' '.join('%02x' % b for b in list)

def checkDeviceConnected():
    dev = usb.core.find(idVendor=__idVendor__, idProduct=__idProduct__)
    if dev is None:
        return False
    else:
        return True

class LowLevelAccess():
    def __init__(self):
        self.rawIn = []
        self.tracer = usb.core.find(idVendor=__idVendor__, idProduct=__idProduct__)
        if self.tracer is None:
            raise ValueError("Device not connected")
        self.tracer.set_configuration()
    def readUSB(self):
        """
        This method reads the USB data from the simtracer.
        """
        try:
            tmp = self.tracer.read(END_POINT, 10000, None, 100000).tolist()
            while(self.checkForEmptyData(tmp)):
                tmp = self.tracer.read(END_POINT, 10000, None, 100000).tolist()
            self.rawIn = tmp
        except:
            time.sleep(1)
            self.readUSB()
    def checkForEmptyData(self, raw):
        if(len(raw) == 10 or raw[10] is 0x60 or len(raw) == 11):
            return True
        else:
            return False

class DataAbstraction:
    def __init__(self, queue):
        self.queue = queue
        self.lowLevel = LowLevelAccess()
    def readInput(self):
        while True:
            self.lowLevel.readUSB()
            raw = self.lowLevel.rawIn
            self.queue.put(raw)
ui.py
from Tkinter import *
import time
import TracerAccess as io
from multiprocessing import Process, Queue
from Queue import Empty
from math import ceil

def findNumberOfLines(message):
    lines = message.split("\n")
    return len(lines)

class Application(Frame):
    def addTextToRaw(self, text, changeColour=False, numberOfLines=0):
        self.rawText.config(state=NORMAL)
        if changeColour is True:
            self.rawText.insert(END, text, 'oddLine')
        else:
            self.rawText.insert(END, text)
        self.rawText.config(state=DISABLED)
    def updateRaw(self, text):
        if(self.numberOfData() % 2 is not 0):
            self.addTextToRaw(text, True)
        else:
            self.addTextToRaw(text)
    def startTrace(self):
        self.dataAbstraction = io.DataAbstraction(self.queue)
        self.splitProc = Process(target=self.dataAbstraction.readInput())
        self.stopButton.config(state="normal")
        self.startButton.config(state="disabled")
        self.splitProc.start()
    def pollQueue(self):
        try:
            data = self.queue.get(0)
            self.dataReturned.append(data)
            self.updateRaw(str(data).upper())
            self.rawText.tag_config("oddLine", background="#F3F6FA")
        except Empty:
            pass
        finally:
            try:
                if(self.splitProc.is_alive() is False):
                    self.stopButton.config(state="disabled")
                    self.startButton.config(state="normal")
            except AttributeError:
                pass
            self.master.after(10, self.pollQueue)
    def stopTrace(self):
        self.splitProc.join()
        self.stopButton.config(state="disabled")
        self.startButton.config(state="normal")
    def createWidgets(self):
        self.startButton = Button(self)
        self.startButton["text"] = "Start"
        self.startButton["command"] = self.startTrace
        self.startButton.grid(row=0, column=0)
        self.stopButton = Button(self)
        self.stopButton["text"] = "Stop"
        self.stopButton["command"] = self.stopTrace
        self.stopButton.config(state="disabled")
        self.stopButton.grid(row=0, column=1)
        self.rawText = Text(self, state=DISABLED, width=82)
        self.rawText.grid(row=1, columnspan=4)
    def __init__(self, master):
        Frame.__init__(self, master)
        self.queue = Queue()
        self.master.after(10, self.pollQueue)
        self.pack()
        self.dataReturned = []
        self.createWidgets()
    def numberOfData(self):
        return len(self.dataReturned)
Main.py
from Tkinter import Tk  # needed for Tk() below
import ui as ui

if __name__ == "__main__":
    root = Tk()
    root.columnconfigure(0, weight=1)
    app = ui.Application(root)
    app.mainloop()
So the background thread never finishes; however, when I end the process, the UI starts to be displayed before closing. The problem could have appeared because of my design for the TracerAccess.py module, as I developed this after moving straight from Java with little to no design experience in Python.
What multiprocessing.Process does, internally, is really a fork(), which effectively duplicates your process. You can perhaps visualize it as:
                 / ["background" process] -------------\
[main process] --+                                      +-- [main process]
                 \ [main process continued] -----------/
p.join() attempts to "join" the two processes back to one. This effectively means: waiting until the background process is finished. Here's the actual (full) code from the .join() function:
def join(self, timeout=None):
    '''
    Wait until child process terminates
    '''
    assert self._parent_pid == os.getpid(), 'can only join a child process'
    assert self._popen is not None, 'can only join a started process'
    res = self._popen.wait(timeout)
    if res is not None:
        _current_process._children.discard(self)
Note how self._popen.wait is called.
This is obviously not what you want.
What you probably want, in the context of Tkinter, is to use the Tk event loop, for example like this (Python 3, but the concept also works on Python 2):
from multiprocessing import Process, Queue
import time, tkinter, queue, random, sys

class Test:
    def __init__(self, root):
        self.root = root
        txt = tkinter.Text(root)
        txt.pack()
        self.q = Queue()
        p = Process(target=self.bg)
        p.start()
        self.checkqueue()
        print('__init__ done', end=' ')
    def bg(self):
        print('Starting bg loop', end=' ')
        n = 42
        while True:
            # Burn some CPU cycles
            (int(random.random() * 199999)) ** (int(random.random() * 1999999))
            n += 1
            self.q.put(n)
        print('bg task finished', end=' ')
    def checkqueue(self):
        try:
            print(self.q.get_nowait(), end=' ')
        except queue.Empty:
            print('Queue empty', end=' ')
        sys.stdout.flush()
        # Run myself again after 1 second
        self.root.after(1000, self.checkqueue)

root = tkinter.Tk()
Test(root)
root.mainloop()
You don't call .join(), and instead use the .after() method, which schedules a function to run after n milliseconds (if you've ever used Javascript, think setTimeout()) to read the queue.
Depending on the actual content of your bg() function, you may not even need multiprocessing at all; just scheduling a function with .after() may be enough.
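For that simpler case, a sketch using only the event loop (assuming the periodic work is cheap enough not to stall the GUI) would be:

import tkinter

root = tkinter.Tk()
txt = tkinter.Text(root)
txt.pack()
counter = [0]

def tick():
    # Cheap periodic work runs directly on the Tk event loop.
    counter[0] += 1
    txt.insert('end', 'tick %d\n' % counter[0])
    root.after(1000, tick)  # reschedule myself, just like checkqueue() above

root.after(1000, tick)
root.mainloop()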
Also see:
http://tkinter.unpythonic.net/wiki/UsingTheEventLoop
