I'm new to Python and having trouble with the queue module. I'm initializing a queue in my __init__ constructor, but when I run my Python app it crashes. I've included a snippet of my code; is there a better way to do it?
import os, sys
import time

if sys.hexversion < 0x03000000:
    import Queue
else:
    import queue as Queue

class Appwindow():
    def __init__(self):
        self.myQueue = Queue.Queue()

    def displayMeth(self, stuff):
        if self.displayed:
            self.myQueue.put(stuff)
try:
    from queue import Queue
except ImportError:
    from Queue import Queue
# shiny fancy Atomic Message Queue for concurrency
q = Queue()
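Applied to your class, a minimal sketch could look like the following. Note that your snippet never sets self.displayed; it is initialized here as an assumption, and if it is missing in your real code the resulting AttributeError could be the crash you are seeing.

try:
    from queue import Queue   # Python 3
except ImportError:
    from Queue import Queue   # Python 2, where the module name is capitalized

class Appwindow():
    def __init__(self):
        self.displayed = False   # assumed flag; set it wherever your window is actually shown
        self.myQueue = Queue()   # no module prefix needed with the import above

    def displayMeth(self, stuff):
        if self.displayed:
            self.myQueue.put(stuff)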
I'm implementing with SimpleXMLRPCServer & Redis.
This is some example code of how I create the workers (the workers are created from the client).
Now I want to implement fault tolerance, so that if the main cluster fails (e.g. Ctrl+C or any exception), I iterate over my workers dictionary and select one of them as the new leader to become the master (cluster).
This is the cluster main:
if __name__ == '__main__':
    # -------------------- Start Redis & XMLRPC Server -------------------- #
    print("Starting Redis & XMLRPC Server...")
    REDIS_SERVER = redis.Redis()
    server = SimpleXMLRPCServer(('localhost', 9000), allow_none=True)
    server.register_function(add_worker, "add_worker")
    # ... other functions
    server.register_function(missatge_worker_eliminat, "missatge_worker_eliminat")
    try:
        print('Control-C to exit')
        server.serve_forever()
    except KeyboardInterrupt:
        print('Exiting, but there is another cluster open now!')
from audioop import add
from decimal import MIN_EMIN
from glob import glob
from xmlrpc.server import SimpleXMLRPCServer
import multiprocessing as mp
import xmlrpc.client
from multiprocessing import Process
import json
import redis
import logging
import pandas as pd
import dask.dataframe as dd
import uuid
import os
import time, threading
import asyncio
def add_worker():
    global WORKERS
    global WORKER_ID
    global processos
    proc = Process(target=start_worker, args=(REDIS_SERVER,))
    proc.start()
    WORKERS[WORKER_ID] = proc
    WORKER_ID += 1
    processos.append(proc)
    return "[!] Worker with ID: {id} successfully added.".format(id=WORKER_ID)
So I have been struggling with this one pickle error, which is driving me crazy. I have the following master Engine class:
import eventlet
import socketio
import multiprocessing
from multiprocessing import Queue
from multi import SIOSerever

class masterEngine:
    if __name__ == '__main__':
        serverObj = SIOSerever()
        try:
            receiveData = multiprocessing.Process(target=serverObj.run)
            receiveData.start()

            receiveProcess = multiprocessing.Process(target=serverObj.fetchFromQueue)
            receiveProcess.start()

            receiveData.join()
            receiveProcess.join()
        except Exception as error:
            print(error)
and I have another file called multi, which looks like the following:
import multiprocessing
from multiprocessing import Queue
import eventlet
import socketio

class SIOSerever:
    def __init__(self):
        self.cycletimeQueue = Queue()
        self.sio = socketio.Server(cors_allowed_origins='*', logger=False)
        self.app = socketio.WSGIApp(self.sio, static_files={'/': 'index.html'})
        self.ws_server = eventlet.listen(('0.0.0.0', 5000))

        @self.sio.on('production')
        def p_message(sid, message):
            self.cycletimeQueue.put(message)
            print("I logged : " + str(message))

    def run(self):
        eventlet.wsgi.server(self.ws_server, self.app)

    def fetchFromQueue(self):
        while True:
            cycle = self.cycletimeQueue.get()
            print(cycle)
As you can see, I am trying to create two processes, one for run and one for fetchFromQueue, which I want to run independently.
My run function starts the python-socketio server to which I'm sending some data from an HTML web page (this runs perfectly without multiprocessing). I am then trying to push the received data into a Queue so that my other function can retrieve it and work with it.
I have a set of time-consuming operations that I need to carry out on the data received from the socket, which is why I'm pushing it all into a Queue.
On running the masterEngine class I receive the following:
Can't pickle <class 'threading.Thread'>: it's not the same object as threading.Thread
I ended!
[Finished in 0.5s]
Can you please help with what I am doing wrong?
From multiprocessing programming guidelines:
Explicitly pass resources to child processes
On Unix using the fork start method, a child process can make use of a shared resource created in a parent process using a global resource. However, it is better to pass the object as an argument to the constructor for the child process.
Apart from making the code (potentially) compatible with Windows and the other start methods this also ensures that as long as the child process is still alive the object will not be garbage collected in the parent process. This might be important if some resource is freed when the object is garbage collected in the parent process.
Therefore, I slightly modified your example by removing everything unnecessary, but showing an approach where the shared queue is explicitly passed to all processes that use it:
import multiprocessing

MAX = 5

class SIOSerever:
    def __init__(self, queue):
        self.cycletimeQueue = queue

    def run(self):
        for i in range(MAX):
            self.cycletimeQueue.put(i)

    @staticmethod
    def fetchFromQueue(cycletimeQueue):
        while True:
            cycle = cycletimeQueue.get()
            print(cycle)
            if cycle >= MAX - 1:
                break

def start_server(queue):
    server = SIOSerever(queue)
    server.run()

if __name__ == '__main__':
    try:
        queue = multiprocessing.Queue()

        receiveData = multiprocessing.Process(target=start_server, args=(queue,))
        receiveData.start()

        receiveProcess = multiprocessing.Process(target=SIOSerever.fetchFromQueue, args=(queue,))
        receiveProcess.start()

        receiveData.join()
        receiveProcess.join()
    except Exception as error:
        print(error)
0
1
...
I am trying to emit a pyqtSignal in a multi-threaded fashion. I created a function that performs the computations (e.g. func), and another function that takes that task and runs it in multiple threads (e.g. Function).
The code works well when I use the parent thread, but when I use multiple threads the computations work fine and the signal is not emitted.
I need to use multithreading, since the functions I am writing perform computationally expensive tasks.
Please find below the sample code (I have used a simple print function in this example):
from PyQt5.QtCore import QObject, pyqtSignal, pyqtSlot
import time
from threading import Thread
import sys
import math
import concurrent.futures

class Plot2D(QObject):
    finish = pyqtSignal(float)

    def __init__(self):
        super(Plot2D, self).__init__()

    def Function(self):
        st = time.time()

        # Using parent thread
        # self.func()

        # Using multi-thread 1
        # t = Thread(target=self.func)
        # t.start()
        # t.join()

        # Using multi-thread 2
        with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
            f = executor.submit(self.func)

        en = time.time()
        print(en - st)

    def func(self):
        n = 10
        v = (1 * 100 / (n - 1))
        for i in range(n):
            print('thread')
            self.finish.emit(v)

    def fprint(self):
        print('works')

obj = Plot2D()
obj.finish.connect(obj.fprint)
obj.Function()
You have to be clear about the following concept: signals need a running event loop in order to work.
Considering the above, the solutions are:
threading.Thread
You should not use join(), because it blocks the main thread where the event loop lives, and as explained above the signals will then not work.
from PyQt5 import QtCore
from threading import Thread

class Plot2D(QtCore.QObject):
    finished = QtCore.pyqtSignal(float)

    def Function(self):
        Thread(target=self.func).start()

    def func(self):
        n = 10
        v = 1 * 100 / (n - 1)
        for i in range(n):
            print("thread")
            self.finished.emit(v)

    @QtCore.pyqtSlot()
    def fprint(self):
        print("works")

if __name__ == "__main__":
    import sys

    app = QtCore.QCoreApplication(sys.argv)

    obj = Plot2D()
    obj.finished.connect(obj.fprint)
    obj.Function()

    sys.exit(app.exec_())
Output:
thread
thread
thread
works
thread
works
thread
works
thread
thread
works
thread
works
thread
thread
works
works
works
works
works
concurrent.futures
Do not use with, since exiting the with block calls executor.shutdown(wait=True), which blocks the main thread (and we already know what problem that generates); instead call executor.shutdown(wait=False) explicitly:
from PyQt5 import QtCore
import concurrent.futures

class Plot2D(QtCore.QObject):
    finished = QtCore.pyqtSignal(float)

    def Function(self):
        executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
        f = executor.submit(self.func)
        executor.shutdown(wait=False)

    def func(self):
        n = 10
        v = 1 * 100 / (n - 1)
        for i in range(n):
            print("thread")
            self.finished.emit(v)

    @QtCore.pyqtSlot()
    def fprint(self):
        print("works")

if __name__ == "__main__":
    import sys

    app = QtCore.QCoreApplication(sys.argv)

    obj = Plot2D()
    obj.finished.connect(obj.fprint)
    obj.Function()

    sys.exit(app.exec_())
Output:
thread
thread
works
thread
works
thread
works
thread
works
thread
thread
works
thread
works
thread
works
thread
works
works
works
Is there a way to create a callback that executes whenever something is sent to the main process from a child process initiated via multiprocessing? The best I can think of thus far is:
import multiprocessing as mp
import threading
import time

class SomeProcess(mp.Process):
    def run(self):
        while True:
            time.sleep(1)
            self.queue.put(time.time())

class ProcessListener(threading.Thread):
    def run(self):
        while True:
            value = self.queue.get()
            do_something(value)

if __name__ == '__main__':
    queue = mp.Queue()

    sp = SomeProcess()
    sp.queue = queue

    pl = ProcessListener()
    pl.queue = queue

    sp.start()
    pl.start()
No, there is no cleaner way to do this than the one you already posted.
This is how concurrent.futures.ProcessPoolExecutor and multiprocessing.Pool are actually implemented: they have a dedicated thread that drains the tasks/results queue and runs any associated callback.
If you want to save some resources, you can use a SimpleQueue in this case.
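For illustration, the listener pattern from the question works the same way with a SimpleQueue; this is only a sketch of that substitution, with the queue passed in explicitly and do_something as a placeholder:

import multiprocessing as mp
import threading
import time

def do_something(value):
    print(value)

class SomeProcess(mp.Process):
    def __init__(self, queue):
        super().__init__()
        self.queue = queue

    def run(self):
        while True:
            time.sleep(1)
            self.queue.put(time.time())

class ProcessListener(threading.Thread):
    def __init__(self, queue):
        super().__init__(daemon=True)
        self.queue = queue

    def run(self):
        while True:
            do_something(self.queue.get())

if __name__ == '__main__':
    queue = mp.SimpleQueue()   # lighter than mp.Queue: no feeder thread or buffering
    SomeProcess(queue).start()
    ProcessListener(queue).start()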
I have the problem that I need to write values generated by a consumer to disk. I do not want to open a new file handle for every write, so I thought I would use a second queue and another consumer that writes to disk from a single greenlet. The problem with my code is that the second queue does not get consumed asynchronously with the first one: the first queue finishes first, and only then does the second queue get consumed.
I want to write values to disk at the same time as other values are being generated.
Thanks for the help!
#!/usr/bin/python
# -*- coding: utf-8 -*-

import gevent  # pip install gevent
from gevent.queue import *
import gevent.monkey
from timeit import default_timer as timer
from time import sleep
import cPickle as pickle

gevent.monkey.patch_all()

def save_lineCount(count):
    with open("count.p", "wb") as f:
        pickle.dump(count, f)

def loader():
    for i in range(0, 3):
        q.put(i)

def writer():
    while True:
        task = q_w.get()
        print "writing", task
        save_lineCount(task)

def worker():
    while not q.empty():
        task = q.get()
        if task % 2:
            q_w.put(task)
            print "put", task
        sleep(10)

def asynchronous():
    threads = []
    threads.append(gevent.spawn(writer))
    for i in range(0, 1):
        threads.append(gevent.spawn(worker))
    start = timer()
    gevent.joinall(threads, raise_error=True)
    end = timer()
    # pbar.close()
    print "\n\nTime passed: " + str(end - start)[:6]

q = gevent.queue.Queue()
q_w = gevent.queue.Queue()

gevent.spawn(loader).join()
asynchronous()
In general, that approach should work fine. There are some problems with this specific code, though:
Calling time.sleep will cause all greenlets to block. You either need to call gevent.sleep, or make sure the monkey-patching actually applies: patch_all() is called here, but only after "from time import sleep", so the imported name sleep is still bound to the original blocking function (monkey-patch before the other imports, or call gevent.sleep directly). I suspect that's the major problem here.
Writing to a file is also synchronous and causes all greenlets to block. You can use FileObjectThread if that's a major bottleneck.
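A minimal sketch of both fixes applied to the question's code, assuming gevent's FileObjectThread is acceptable for the pickle writes (q and q_w are the queues from the original script):

from gevent.fileobject import FileObjectThread
import gevent
import cPickle as pickle

def save_lineCount(count):
    # The blocking write happens on a native thread, so other greenlets keep running.
    f = FileObjectThread(open("count.p", "wb"))
    try:
        pickle.dump(count, f)
    finally:
        f.close()

def worker():
    while not q.empty():
        task = q.get()
        if task % 2:
            q_w.put(task)
            print "put", task
        gevent.sleep(10)   # cooperative sleep: yields to writer() instead of blocking everything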