Producers and Consumers - Multiple Threads in Python

So I've got this code for producers and consumers:
import threading
import time
import random

N = 8
buffer = N * [None]
free = threading.Semaphore(N)
items = threading.Semaphore(0)

def prod():
    n = 0
    i = 0
    while True:
        time.sleep(random.random())
        free.acquire()
        buffer[i] = n
        i = (i + 1) % N
        n += 1
        items.release()

def cons():
    i = 0
    while True:
        time.sleep(random.random())
        items.acquire()
        print(buffer[i])
        i = (i + 1) % N
        free.release()

def main():
    p = threading.Thread(target=prod, args=[])
    c = threading.Thread(target=cons, args=[])
    p.start()
    c.start()
    p.join()
    c.join()

main()
But I want to be able to have three threads each for the producer and consumer. Can someone suggest a way I could do this using a third semaphore? Thanks.

Assuming this is not homework about semaphores and you want a real solution, you should use the Queue object, which handles all of this by itself. If I understood correctly, you want three producers and three consumers sharing one buffer that can hold at most 8 items. If that's the case, the code can be simplified to something like this:
import threading
import time
import random
import queue

def prod(q):
    n = 0
    while True:
        time.sleep(random.random())
        q.put(n)  # blocks while the queue is full
        n += 1

def cons(q):
    while True:
        time.sleep(random.random())
        n = q.get()  # blocks while the queue is empty
        print(n)

def main():
    N = 8
    q = queue.Queue(N)  # bounded queue: holds at most N items
    threads = []
    for i in range(3):
        threads.append(threading.Thread(target=cons, args=[q]))
        threads.append(threading.Thread(target=prod, args=[q]))
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()  # this will never really finish, because the threads run forever
If you are interested in how the queue is implemented internally, you can see the source code here.
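If the exercise does require a third semaphore, the standard pattern (as in the Little Book of Semaphores) is to add a mutex protecting the shared indices, with producers and consumers keeping separate indices. A minimal sketch, untested, in Python 3; using a single shared mutex for both sides is a simplification (one mutex per side would allow a bit more concurrency):

import threading
import time
import random

N = 8
buffer = N * [None]
free = threading.Semaphore(N)   # counts free slots
items = threading.Semaphore(0)  # counts filled slots
mutex = threading.Semaphore(1)  # the third semaphore: guards the shared indices

in_i = out_i = n = 0  # next slot to fill, next slot to drain, next value

def prod():
    global in_i, n
    while True:
        time.sleep(random.random())
        free.acquire()
        with mutex:
            buffer[in_i] = n
            in_i = (in_i + 1) % N
            n += 1
        items.release()

def cons():
    global out_i
    while True:
        time.sleep(random.random())
        items.acquire()
        with mutex:
            val = buffer[out_i]
            out_i = (out_i + 1) % N
        free.release()
        print(val)

threads = [threading.Thread(target=prod) for _ in range(3)]
threads += [threading.Thread(target=cons) for _ in range(3)]
for t in threads:
    t.start()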

Related

Race condition doesn't happen

I have written a bit of code to see the race condition, but it doesn't happen.
from threading import Thread
from time import sleep

class SharedContent:
    def __init__(self, initial_value=0) -> None:
        self.initial_value = initial_value

    def increase(self, delta=1):
        sleep(1)
        self.initial_value += delta

content = SharedContent(0)
threads: list[Thread] = []
for i in range(250):
    t = Thread(target=content.increase)
    t.start()
    threads.append(t)

# wait until all threads have finished their job
while True:
    n = 0
    for t in threads:
        if t.is_alive():
            sleep(0.2)
            continue
        n += 1
    if n == len(threads):
        break

print(content.initial_value)
The output is 250, which implies no race condition happened!
Why is that?
I even tried this with random sleep times, but the output was the same.
I changed your program. This version prints a different number every time I run it.
#!/usr/bin/env python3
from threading import Thread

class SharedContent:
    def __init__(self, initial_value=0) -> None:
        self.initial_value = initial_value

    def increase(self, delta=1):
        for i in range(0, 1000000):
            self.initial_value += delta

content = SharedContent(0)
threads = []
for i in range(2):
    t = Thread(target=content.increase)
    t.start()
    threads.append(t)

# wait until all threads have finished their job
for t in threads:
    t.join()

print(content.initial_value)
What I changed:
Only two threads instead of 250.
Got rid of sleep() calls.
Each thread increments the variable one million times instead of just one time.
Main program uses join() to wait for the threads to finish.
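Why the original doesn't race: each thread runs the read-modify-write `self.initial_value += delta` exactly once, and that window is only a few bytecodes long, so a thread switch almost never lands inside it; the one-second sleep also spreads the 250 increments far apart in time. Repeating the increment a million times per thread makes a lost update practically certain. For completeness, the usual fix is to guard the increment with a lock; a minimal sketch (untested):

from threading import Thread, Lock

class SharedContent:
    def __init__(self, initial_value=0) -> None:
        self.initial_value = initial_value
        self.lock = Lock()  # serializes the read-modify-write

    def increase(self, delta=1):
        for _ in range(1000000):
            with self.lock:
                self.initial_value += delta

content = SharedContent(0)
threads = [Thread(target=content.increase) for _ in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(content.initial_value)  # always 2000000 with the lock in place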

How to gather results from Python's Thread class with an input and output queue?

I'm trying to learn a bit about threading in Python. I'm aware there are various other pools, processes, or Pipes I could use that might be easier, but I'm interested in using the threading module.
from threading import Thread
from queue import Queue

class SimulationThread(Thread):
    def __init__(self, input_queue: Queue, results_queue: Queue):
        Thread.__init__(self)
        self.input_queue = input_queue
        self.results_queue = results_queue

    def run(self) -> None:
        try:
            data = self.input_queue.get()  # will be replaced with simulation data
            self.results_queue.put(data)
        finally:
            return self.input_queue.task_done()

N = 10  # number of simulations to run
NP = 8  # number of threads to use

input_queue = Queue()
results_queue = Queue()
for x in range(NP):
    worker = SimulationThread(input_queue, results_queue)
    worker.daemon = True
    worker.start()

for i in range(N):
    input_queue.put(i)
Now I've tried a few different things to gather up the results:
# always returns 0
print(results_queue.get())

# hangs
results_queue.join()

# does nothing, I'm guessing queue is not yet populated
while not results_queue.empty():
    print(results_queue.get())

# prints nothing
ret = results_queue.get()
while ret is None:
    ret = results_queue.get()
print(ret)

# finally prints out the results, but in order of 1 - 7. No 8 or 9.
ret = results_queue.get()
while ret is not None:
    ret = results_queue.get()
    print(ret)
    if results_queue.empty():
        ret = None
This is where I stop and ask for some help. How can I get all NP threads processing all N numbers at the same time?
By default, Queue.get blocks if necessary until an item is available. You add 10 items to input_queue, but then you create only 8 threads.
Your threads should continuously collect and process items from the queue until they are stopped. You can try something like this:
test.py:
from queue import Queue
from threading import Thread

N = 10  # number of simulations to run
NP = 8  # number of threads to use

class SimulationThread(Thread):
    def __init__(self, input_queue, results_queue):
        super().__init__()
        self.input_queue = input_queue
        self.results_queue = results_queue

    def run(self):
        for data in iter(self.input_queue.get, "STOP"):
            self.results_queue.put(data * 2)

def main():
    input_queue = Queue()
    results_queue = Queue()

    for i in range(N):
        input_queue.put(i)

    for _ in range(NP):
        SimulationThread(input_queue, results_queue).start()

    for i in range(N):
        print(i, results_queue.get())

    for _ in range(NP):
        input_queue.put("STOP")

if __name__ == "__main__":
    main()
Test:
$ python test.py
0 0
1 2
2 4
3 6
4 8
5 10
6 12
7 14
8 16
9 18
You probably want a joinable queue: each task, as it finishes its job, calls .task_done() on the queue from which its input was fetched. (For threads, queue.Queue already supports this; the multiprocessing equivalent is JoinableQueue.)
Your main thread then calls queue.join() on that same queue. This will not return until there have been as many calls to task_done() as there were items added to the queue.
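A minimal sketch of that pattern, untested; the `data * 2` here stands in for the real simulation:

from queue import Queue
from threading import Thread

N = 10  # number of simulations to run
NP = 8  # number of threads to use

input_queue = Queue()
results_queue = Queue()

def worker():
    while True:
        data = input_queue.get()     # blocks until an item is available
        results_queue.put(data * 2)  # placeholder for the simulation
        input_queue.task_done()      # exactly one task_done() per get()

for _ in range(NP):
    Thread(target=worker, daemon=True).start()

for i in range(N):
    input_queue.put(i)

input_queue.join()  # returns once every queued item has been task_done()
results = [results_queue.get() for _ in range(N)]
print(sorted(results))  # [0, 2, 4, ..., 18]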

How to add a specific number of additional workers to an existing multiprocessing pool?

In the situation below I've created a pool with two workers and perform tasks with it. During task processing, task_queue is checked regularly so it doesn't exceed a certain length limit and cause upstream/downstream clutter. How can I dynamically add more workers to reduce the task queue length?
import multiprocessing as mp

... code snippet ...

def main(poolsize, start_process=None):
    pool = mp.Pool(processes=poolsize, initializer=start_process)
    done = False
    task_queue = []
    while True:
        ... snippet code: do something ...
        if len(task_queue) >= 10:
            ... code to expand pool goes here ...
        if done == True:
            break
    ... do final something ...

if __name__ == '__main__':
    # freeze_support()
    poolsize = 2
    main(poolsize)
To add more workers while the pool is processing jobs, you can define the function below and call it from within the while-loop:
def repopulate(pool, add_workers):
    # Note: this relies on private Pool internals (pool._pool,
    # _repopulate_pool), which may change between Python versions.
    current_pool_size = len(pool._pool)  # pool._pool is the list of worker processes
    new_pool_size = current_pool_size + add_workers
    pool._processes = new_pool_size
    pool._repopulate_pool()
    return pool
Within the while-loop in main():

    if len(task_queue) >= 10:
        new_workers = 2
        repopulate(pool, new_workers)
multiprocessing.Pool with dynamic size
A full example based on the answer by ZF007:
import multiprocessing, time, random

def worker_function(job_id):
    dt = random.randint(1, 10)
    print(f"job {job_id}: sleeping for {dt} seconds")
    time.sleep(dt)
    return job_id * job_id

def get_job_done(job_id):
    return lambda val: print(f"job {job_id}: job done: val={val}")

def grow_pool(pool, new_size, max_size=None):
    new_size = min(new_size, max_size) if max_size else new_size
    if new_size > pool._processes:
        print(f"growing pool from {pool._processes} to {new_size}")
        pool._processes = new_size
        pool._repopulate_pool()

if __name__ == "__main__":
    # start pool
    start_workers = 1  # start N workers before demand
    max_workers = 4    # run N workers on demand
    pool = multiprocessing.Pool(start_workers)

    # add jobs
    num_jobs = 10
    grow_pool(pool, num_jobs, max_workers)
    for job_id in range(0, num_jobs):
        job_done = get_job_done(job_id)
        print(f"job {job_id}: adding job")
        pool.apply_async(worker_function, args=(job_id,), callback=job_done)

    # wait
    pool.close()
    pool.join()
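If relying on private Pool internals is unappealing, an alternative that avoids resizing altogether is to start the pool at its maximum size and cap the number of in-flight tasks with a semaphore, which directly addresses the "task queue too long" problem. A sketch under those assumptions (the names work, release_slot, and MAX_IN_FLIGHT are made up for illustration):

import multiprocessing
import threading

MAX_IN_FLIGHT = 10  # illustrative cap on submitted-but-unfinished tasks

def work(x):
    return x * x

if __name__ == "__main__":
    pool = multiprocessing.Pool(processes=4)  # start at the maximum size
    slots = threading.BoundedSemaphore(MAX_IN_FLIGHT)
    results = []

    def release_slot(_):
        slots.release()  # runs in the pool's result-handler thread

    for x in range(100):
        slots.acquire()  # blocks while MAX_IN_FLIGHT tasks are pending
        results.append(pool.apply_async(work, (x,),
                                        callback=release_slot,
                                        error_callback=release_slot))
    pool.close()
    pool.join()
    print(sum(r.get() for r in results))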

Rendezvous threads - thread doesn't finish processing when join() unblocks

I'm going through the "Little Book of Semaphores" right now, and I'm having a problem with the first Barrier problem. In the code below, I'm trying to have 3 threads rendezvous before continuing. This part works fine; I always get three "before"s pushed to the queue. However, I don't always get three "after"s pushed to the queue. Sometimes I do, but not always. What am I doing wrong?
import threading
import random
import Queue
import time

num_loops = 1

class myThread(threading.Thread):
    def __init__(self, id, count, n, q, locks):
        threading.Thread.__init__(self)
        self.id = id
        self.q = q
        self.n = n
        self.locks = locks
        self.count = count
        return

    def run(self):
        time.sleep(random.random() / 100)
        self.q.put("before")
        with self.locks['mutex']:
            self.count[0] += 1
            if self.count[0] == self.n:
                self.locks['barrier'].release()
        self.locks['barrier'].acquire()
        self.locks['barrier'].release()
        time.sleep(random.random() / 100)
        self.q.put("after")

if __name__ == '__main__':
    total = 10
    incorrect = 0
    num_threads = 3
    for _ in range(total):
        q = Queue.Queue()
        locks = {'mutex': threading.Semaphore(1),
                 'barrier': threading.Semaphore(0),
                 }
        threads = []
        count = [0]
        for i in range(num_threads):
            t = myThread(i, count, num_threads, q, locks)
            t.start()
            threads.append(t)
        for i in threads:
            t.join()
            print "join"
        one_loop = ['before'] * num_threads + ['after'] * num_threads
        total_loop = one_loop * num_loops
        result = []
        while not q.empty():
            result.append(q.get())
        print result
        if result != total_loop:
            incorrect += 1
    print "%s out of %s is wrong" % (incorrect, total)
I found the problem. You do not join all the threads. The line:
for i in threads:
    t.join()
    print "join"

Should be:

for i in threads:
    i.join()  # changed line
    print "join"

Joining t first waits only for the last thread created; in the remaining iterations the join is a no-op, so the loop can finish before the other threads have pushed their "after".
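As an aside, Python 3's threading module has a Barrier class (added in 3.2) that implements this rendezvous directly, without hand-rolled semaphores. A minimal sketch (untested):

import threading
import random
import time
import queue

num_threads = 3
barrier = threading.Barrier(num_threads)
q = queue.Queue()

def worker():
    time.sleep(random.random() / 100)
    q.put("before")
    barrier.wait()  # blocks until all three threads arrive
    time.sleep(random.random() / 100)
    q.put("after")

threads = [threading.Thread(target=worker) for _ in range(num_threads)]
for t in threads:
    t.start()
for t in threads:
    t.join()

result = [q.get() for _ in range(q.qsize())]
print(result)  # all three "before"s always precede any "after"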

How to print these numbers in the shortest time using threads in Python

This is my code:
import thread

k = 0
b = 0

def a(n):
    i = 0
    while i < n:
        print i
        i += 1

j = 5000
while k < 5000:
    a(k)
    k += 1

for n in range(2, 5):
    thread.start_new_thread(a, (j * n,))
I want to run three threads plus the main thread:
the main thread prints 1, 2, 3, ..., 5000
thread1 prints 5001, 5002, ..., 10000
thread2 prints 10001, 10002, ..., 15000
thread3 prints 15001, 15002, ..., 20000
all running at the same time.
What can I do?
Thanks
You should use threading instead of thread, since it's easier to use and works in almost every case. Your code would then look like this:
from threading import Thread

class PrintNumber(Thread):
    def __init__(self, n):
        Thread.__init__(self)
        self.n = n

    def run(self):
        for i in range(self.n, self.n + 5000):
            print i

# create your threads here
# use a loop if needed
thread = PrintNumber(0)     # first 5000 numbers
thread.start()
thread = PrintNumber(5000)  # next 5000
thread.start()
Coded from memory and not tested; it should work anyway.
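A sketch closer to the exact layout the question asks for, with three worker threads plus the main thread each printing one range (Python 3 syntax; the helper print_range is made up for illustration). Note that CPython's GIL serializes the actual printing, so threads will not make this faster, and output from the four ranges will interleave:

from threading import Thread

def print_range(start, end):
    for i in range(start, end + 1):
        print(i)

# thread1: 5001-10000, thread2: 10001-15000, thread3: 15001-20000
threads = [Thread(target=print_range, args=(5000 * n + 1, 5000 * (n + 1)))
           for n in range(1, 4)]
for t in threads:
    t.start()

print_range(1, 5000)  # the main thread prints 1-5000 concurrently

for t in threads:
    t.join()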
