How to connect 2 ultrasonic sensors concurrently using Python multiprocessing Process? - python

I'm new to Python, but I'm working on a project in which I need to connect 3 or more ultrasonic sensors concurrently. I read all about threads and multiprocessing and ran a couple of examples successfully. I know the code has to be run from the command prompt or the Pi 2 terminal. However, the multiprocessing code I wrote does not work and I cannot figure out why. Could someone please help me?
from multiprocessing import Process
from gpiozero import DistanceSensor

ultrasonic = DistanceSensor(echo=18, trigger=23)
ultrasonic_B = DistanceSensor(echo=25, trigger=24)
ultrasonic_C = DistanceSensor(echo=12, trigger=16)
ultrasonic.max_distance = 1
ultrasonic_B.max_distance = 1
ultrasonic_C.max_distance = 1

def A(name):
    while True:
        ultrasonic.wait_for_in_range()
        print('Distance')
        if ultrasonic.wait_for_out_of_range():
            print('Out of range')

def B(name):
    while True:
        ultrasonic_B.wait_for_in_range()
        print('Distance_B')
        if ultrasonic_B.wait_for_out_of_range():
            print('Out of range_B')

def C(name):
    while True:
        ultrasonic_C.wait_for_in_range()
        print('Distance_C')
        if ultrasonic_C.wait_for_out_of_range():
            print('Out of range_C')

if __name__ == "__main__":
    p = Process(target=A)
    p1 = Process(target=B)
    p2 = Process(target=C)
    p.start()
    p1.start()
    p2.start()
I took your suggestions into consideration and was able to make the first loop work, but the other two loops give me nothing. Here is the updated code:
from multiprocessing import Process
from gpiozero import DistanceSensor

ultrasonic = DistanceSensor(echo=18, trigger=23)
ultrasonic_B = DistanceSensor(echo=25, trigger=24)
ultrasonic_C = DistanceSensor(echo=12, trigger=16)
ultrasonic.max_distance = 1
ultrasonic_B.max_distance = 1
ultrasonic_C.max_distance = 1

def A():
    while ultrasonic.wait_for_in_range():
        print('Distance')
        if ultrasonic.wait_for_out_of_range():
            print('Out of range')

def B():
    while ultrasonic_B.wait_for_in_range():
        print('Distance_B')
        if ultrasonic_B.wait_for_out_of_range():
            print('Out of range_B')

def C():
    while ultrasonic_C.wait_for_in_range():
        print('Distance_C')
        if ultrasonic_C.wait_for_out_of_range():
            print('Out of range_C')

if __name__ == "__main__":
    p = Process(target=A)
    p1 = Process(target=B)
    p2 = Process(target=C)
    p.run()
    p1.run()
    p2.run()
The result comes from the first loop only:
Distance
Out of range
Distance
Out of range
OK, this is what I have now:
from multiprocessing import Process
from gpiozero import DistanceSensor

ultrasonic = DistanceSensor(echo=18, trigger=23)
ultrasonic_B = DistanceSensor(echo=25, trigger=24)
ultrasonic_C = DistanceSensor(echo=12, trigger=16)
ultrasonic.max_distance = 1
ultrasonic_B.max_distance = 1
ultrasonic_C.max_distance = 1

def A():
    while ultrasonic.wait_for_in_range():
        print('Distance')
        if ultrasonic.wait_for_out_of_range():
            print('Out of range')

def B():
    while ultrasonic_B.wait_for_in_range():
        print('Distance_B')
        if ultrasonic_B.wait_for_out_of_range():
            print('Out of range_B')

def C():
    while ultrasonic_C.wait_for_in_range():
        print('Distance_C')
        if ultrasonic_C.wait_for_out_of_range():
            print('Out of range_C')

if __name__ == "__main__":
    p = Process(target=A)
    p1 = Process(target=B)
    p2 = Process(target=C)
    p.start()
    p1.start()
    p2.start()

You do not say what you mean by "does not work", so I am taking a few guesses here.
The obvious failure here would be:
TypeError: A() takes exactly 1 argument (0 given)
The functions A, B and C all take an argument name, but you do not provide it in Process(target=A). It works if you simply remove the parameter from the functions, since you are not even using it.
You can also provide the argument in the call like this:
p = Process(target=A, args=('ultra_a',))
Another possibility is an indentation error; at least in your code paste you have one extra space on each line until def B.
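For illustration, here is a minimal runnable sketch of both fixes, with plain prints standing in for the sensor code:
from multiprocessing import Process

def A(name):
    # Variant 1: keep the parameter and supply it through args.
    print('%s is running' % name)

def B():
    # Variant 2: drop the unused parameter entirely.
    print('B is running')

if __name__ == "__main__":
    p = Process(target=A, args=('ultra_a',))
    p1 = Process(target=B)
    p.start()
    p1.start()
    p.join()
    p1.join()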

import multiprocessing
from gpiozero import DistanceSensor

# Each sensor is created inside its own process function, so every child
# process owns its GPIO objects instead of inheriting them from the parent.
def A():
    ultrasonic = DistanceSensor(echo=18, trigger=23)
    ultrasonic.max_distance = 1
    while ultrasonic.wait_for_in_range():
        print('Sensor_1', ultrasonic.distance, 'm')
        if ultrasonic.wait_for_out_of_range():
            print('0')

def B():
    ultrasonic_B = DistanceSensor(echo=25, trigger=24)
    ultrasonic_B.max_distance = 1
    while ultrasonic_B.wait_for_in_range():
        print('Sensor_2', ultrasonic_B.distance, 'm')
        if ultrasonic_B.wait_for_out_of_range():
            print('0')

def C():
    ultrasonic_C = DistanceSensor(echo=12, trigger=16)
    ultrasonic_C.max_distance = 1
    while ultrasonic_C.wait_for_in_range():
        print('Sensor_3', ultrasonic_C.distance, 'm')
        if ultrasonic_C.wait_for_out_of_range():
            print('0')

if __name__ == "__main__":
    p = multiprocessing.Process(target=A)
    p1 = multiprocessing.Process(target=B)
    p2 = multiprocessing.Process(target=C)
    p.start()
    p1.start()
    p2.start()
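If the parent process needs the readings instead of each child printing them, the same structure can report through a multiprocessing.Queue — a minimal sketch, assuming the same pins and gpiozero setup as above:
from multiprocessing import Process, Queue
from gpiozero import DistanceSensor

def watch(name, echo, trigger, q):
    # Each child owns its own sensor and reports (name, distance) pairs.
    sensor = DistanceSensor(echo=echo, trigger=trigger, max_distance=1)
    while True:
        sensor.wait_for_in_range()
        q.put((name, sensor.distance))
        sensor.wait_for_out_of_range()

if __name__ == "__main__":
    q = Queue()
    specs = [('Sensor_1', 18, 23), ('Sensor_2', 25, 24), ('Sensor_3', 12, 16)]
    for name, echo, trigger in specs:
        Process(target=watch, args=(name, echo, trigger, q), daemon=True).start()
    while True:
        name, distance = q.get()  # blocks until any sensor reports
        print(name, distance, 'm')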

Related

Use generator to iterate through data from a multiprocess

I would like to perform the following using multiprocessing instead of subprocess.Popen, because I cannot pass objects using Popen. I know my simple example below does not use/pass objects, but that is what I want to do.
Sample code is:
main.py
import subprocess

class ProcReader():
    def __init__(self, python_file):
        self.proc = subprocess.Popen(['python', python_file], stdout=subprocess.PIPE)

    def __iter__(self):
        return self

    def __next__(self):
        while True:
            line = self.proc.stdout.readline()
            if not line:
                raise StopIteration
            return line

if __name__ == "__main__":
    r1 = ProcReader("test1.py")
    r2 = ProcReader("test2.py")
    r3 = ProcReader("test3.py")
    for l1, l2, l3 in zip(r1, r2, r3):
        d1 = l1.decode('utf-8').strip().split(",")
        d2 = l2.decode('utf-8').strip().split(",")
        d3 = l3.decode('utf-8').strip().split(",")
        print(f"{d1[0]}:{d1[1]},{d2[0]}:{d2[1]},{d3[0]}:{d3[1]}")
test#.py
for x in range(10):
    print("test1,{}".format(x))
My sample code is in Python 3, but I would like an equivalent, using multiprocessing, in Python 2.7. Should the equivalent also read from stdout, or should it use a queue and just have a worker reading from the queue?
Update:
My example using multiprocessing:
import time
from multiprocessing import Process, Queue

def writer1(queue):
    for x in range(10):
        time.sleep(1)
        queue.put("test1,{}".format(x))

def writer2(queue):
    for x in range(10):
        time.sleep(2)
        queue.put("test2,{}".format(x))

def writer3(queue):
    for x in range(10):
        queue.put("test3,{}".format(x))

if __name__ == '__main__':
    q1 = Queue()
    q2 = Queue()
    q3 = Queue()
    writer_1 = Process(target=writer1, args=(q1,))
    writer_1.daemon = True
    writer_1.start()
    writer_2 = Process(target=writer2, args=(q2,))
    writer_2.daemon = True
    writer_2.start()
    writer_3 = Process(target=writer3, args=(q3,))
    writer_3.daemon = True
    writer_3.start()
    while True:
        msg1 = q1.get()
        msg2 = q2.get()
        msg3 = q3.get()
        if msg1 and msg2 and msg3:
            d1 = msg1.strip().split(",")
            d2 = msg2.strip().split(",")
            d3 = msg3.strip().split(",")
            print("{}:{},{}:{},{}:{}".format(d1[0], d1[1],
                                             d2[0], d2[1],
                                             d3[0], d3[1]))
        else:
            break
I didn't realize q1.get() waits until something is there; I added sleep to verify this. Also, how do I check that a process is done writing? It seems to just be waiting at the end.
To adapt your second example for my comment about sentinel objects, maybe you're looking for something like:
import os
import time
from multiprocessing import Process, Queue

def writer(queue):
    value = os.getpid()
    for x in range(10):
        time.sleep(0.1)
        queue.put("{},{}".format(value, x))
    queue.put(None)

def spawn_process():
    q = Queue()
    p = Process(target=writer, args=(q,))
    p.daemon = True
    p.start()
    return (p, q)

if __name__ == "__main__":
    processes_and_queues = [spawn_process() for x in range(3)]
    processes, queues = zip(*processes_and_queues)
    live_queues = list(queues)
    while live_queues:
        messages = []
        for queue in live_queues:
            message = queue.get()
            if message is None:
                live_queues.remove(queue)
            messages.append(message)
        if len(messages) == len(processes):
            print(messages)
It outputs (e.g.)
['51748,0', '51749,0', '51750,0']
['51748,1', '51749,1', '51750,1']
['51748,2', '51749,2', '51750,2']
['51748,3', '51749,3', '51750,3']
['51748,4', '51749,4', '51750,4']
['51748,5', '51749,5', '51750,5']
['51748,6', '51749,6', '51750,6']
['51748,7', '51749,7', '51750,7']
['51748,8', '51749,8', '51750,8']
['51748,9', '51749,9', '51750,9']
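Once every queue has delivered its None sentinel, the writers are done producing; if you also want to be sure the processes themselves have exited (the "done writing" question above), you can join them after the loop:
for p in processes:
    p.join()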

Python: multiprocessing Queue.put() in module won't send anything to parent process

I am trying to make two processes communicate with each other using the multiprocessing package in Python, more precisely the Queue() class. From the parent process, I want to get an updated value from the child process every 5 seconds. This child process is a class function. I have made a toy example where everything works fine.
However, when I try to implement this solution in my project, it seems that the Queue.put() call of the child process in the submodule won't send anything to the parent process: the parent process won't print the desired value, and the code never stops running. Actually, the parent process only prints the value sent to the child process, which is True here, but, as I said, it never stops.
So my questions are:
Is there any error in my toy example?
How should I modify my project in order to get it working just like my toy example?
Toy example: works
main module
from multiprocessing import Process, Event, Lock, Queue, Pipe
import time
import test_mod as test

def loop(output):
    stop_event = Event()
    q = Queue()
    child_process = Process(target=test.child.sub, args=(q,))
    child_process.start()
    i = 0
    print("started at {} ".format(time.time()))
    while not stop_event.is_set():
        i += 1
        time.sleep(5)
        q.put(True)
        print(q.get())
        if i == 5:
            child_process.terminate()
            stop_event.set()
    output.put("main process looped")

if __name__ == '__main__':
    stop_event, output = Event(), Queue()
    k = 0
    while k < 5:
        loop_process = Process(target=loop, args=(output,))
        loop_process.start()
        print(output.get())
        loop_process.join()
        k += 1
submodule
from multiprocessing import Process, Event, Lock, Queue, Pipe
import time

class child(object):
    def __init__(self):
        pass

    def sub(q):
        i = 0
        while i < 2000:
            latest_value = time.time()
            accord = q.get()
            if accord == True:
                q.put(latest_value)
                accord = False
            time.sleep(0.0000000005)
            i += 1
Project code: doesn't work
main module
import neat  # package in which the submodule is
import *some other stuff*

def run(config_file):
    config = neat.Config(some configuration)
    p = neat.Population(config)
    **WHERE MY PROBLEM IS**
    stop_event = Event()
    q = Queue()
    pe = neat.ParallelEvaluator(**args)
    child_process = Process(target=p.run, args=(pe.evaluate, q, other args))
    child_process.start()
    i = 0
    while not stop_event.is_set():
        q.put(True)
        print(q.get())
        time.sleep(5)
        i += 1
        if i == 5:
            child_process.terminate()
            stop_event.set()

if __name__ == '__main__':
    run(config_file)
submodule
class Population(object):
    def __init__():
        *initialization*

    def run(self, q, other args):
        while n is None or k < n:
            *some stuff*
            accord = add_2.get()
            if accord == True:
                add_2.put(self.best_genome.fitness)
                accord = False
        return self.best_genome
NB:
I am not used to multiprocessing.
I have tried to give the most relevant parts of my project, given that the entire code would be far too long.
I have also considered using Pipe(); however, this option didn't work either.
If I see it correctly, your desired submodule is the class Population. However, you start your process with a parameter of type ParallelEvaluator, and I can't see that you supply your Queue q to the subprocess. This is what I see from the code provided:
stop_event = Event()
q = Queue()
pe = neat.ParallelEvaluator(**args)
child_process = Process(target=p.run, args=(pe.evaluate, **args))
child_process.start()
Moreover, the following lines create a race condition:
q.put(True)
print(q.get())
The get command is like a pop: it takes an element and deletes it from the queue. If your subprocess doesn't access the queue between these two lines (because it is busy), the True will never make it to the child process. Hence, it is better to use multiple queues, one for each direction. Something like:
stop_event = Event()
q_in = Queue()
q_out = Queue()
pe = neat.ParallelEvaluator(**args)
child_process = Process(target=p.run, args=(pe.evaluate, **args))
child_process.start()
i = 0
while not stop_event.is_set():
    q_in.put(True)
    print(q_out.get())
    time.sleep(5)
    i += 1
    if i == 5:
        child_process.terminate()
        stop_event.set()
And this is your submodule:
class Population(object):
    def __init__():
        *initialization*

    def run(self, **args):
        while n is None or k < n:
            *some stuff*
            accord = add_2.get()  # add_2 = q_in
            if accord == True:
                add_3.put(self.best_genome.fitness)  # add_3 = q_out
                accord = False
        return self.best_genome
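Since the snippets above are pseudocode around the neat package, here is a minimal, self-contained sketch of the same two-queue request/reply pattern; all names in it are invented for illustration:
from multiprocessing import Process, Queue
import time

def worker(q_in, q_out):
    # Answer each request with a fresh value; a None request means "stop".
    while True:
        request = q_in.get()
        if request is None:
            break
        q_out.put(time.time())

if __name__ == '__main__':
    q_in, q_out = Queue(), Queue()
    p = Process(target=worker, args=(q_in, q_out))
    p.start()
    for _ in range(5):
        q_in.put(True)       # the request goes out on one queue...
        print(q_out.get())   # ...and the reply comes back on the other
        time.sleep(1)
    q_in.put(None)
    p.join()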

Why can't I start two threads inside constructor __init__() of a python class?

I'm simply trying to run two different daemon threads and print a line from each for testing. While this code works:
import time
import threading
from threading import Thread
from myFunctions import *

class Monitoring:
    def alarms(self):
        return alarms.run()

    def generator(self):
        return generator.run()

    def run(self):
        generator = threading.Thread(target=self.alarms)
        generator.daemon = True
        generator.start()
        alarm = threading.Thread(target=self.generator)
        alarm.daemon = True
        alarm.start()
        print("started thread")

if __name__ == '__main__':
    try:
        d = Monitoring()
        d.daemon = True
        d.run()
        print("started the thread")
        while 1:
            time.sleep(1)
    except KeyboardInterrupt:
        alarms.close()
        generator.close()
        print("Main - Keyboard interrupt in __main__")
Something like this doesn't seem to work, and only the first thread, "alarms", starts. Why is that?
class Monitoring:
    def __init__(self, a, g):
        self.a = a
        self.g = g

    def run(self):
        generator = threading.Thread(target=self.a)
        generator.daemon = True
        generator.start()
        alarm = threading.Thread(target=self.g)
        alarm.daemon = True
        alarm.start()
        print("#class run() ")

if __name__ == '__main__':
    try:
        d = Monitoring(alarms.run(), generator.run())
        d.daemon = True
        d.run()
        print("#__main__")
        while 1:
            time.sleep(1)
    except KeyboardInterrupt:
        alarms.close()
        generator.close()
        print("Main - Keyboard interrupt in __main__")
In the line
d = Monitoring(alarms.run(), generator.run())
the functions alarms.run and generator.run are called immediately, and their return values (probably None) are passed as arguments to the Monitoring constructor.
To use the function objects as arguments (which are then called in a new thread), use
d = Monitoring(alarms.run, generator.run)
instead.
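A minimal sketch of the difference, with a stand-in function instead of the alarms and generator objects from the question:
import threading
import time

def tick(label):
    # Stand-in for the alarms.run / generator.run functions from the question.
    while True:
        print("tick from", label)
        time.sleep(1)

class Monitoring:
    def __init__(self, a, g):
        # Store the callables; they are only invoked inside the threads.
        self.a = a
        self.g = g

    def run(self):
        for target in (self.a, self.g):
            t = threading.Thread(target=target, daemon=True)
            t.start()

if __name__ == '__main__':
    # Pass function objects (or lambdas), not the result of calling them.
    m = Monitoring(lambda: tick("alarms"), lambda: tick("generator"))
    m.run()
    time.sleep(3)  # keep the main thread alive long enough to see output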

Trouble starting multiprocess threads and using events

I am trying to feed both an event and a queue to my worker processes. The queue is for them to talk to each other; the event is for signalling them to stop working when needed. I keep getting this error, and I can't quite figure out what is wrong with my constructor definitions.
Here is the error:
Traceback (most recent call last):
  File "dualThreaded.py", line 52, in <module>
    c = get_Count(q, e)
TypeError: __init__() takes exactly 2 arguments (3 given)
Here is my code:
from Queue import Empty
from multiprocessing import Process, Queue, Event
import time

class get_Count(object):
    def __init__(self, q):
        self.q = q

    def run(self, e):
        i = 0
        run = True
        while run == True:
            print 'putting'
            self.q.put('foo %d' % i)
            time.sleep(.5)
            if e.is_set():
                run = False

class read_Count(object):
    def __init__(self, q):
        self.q = q

    def run(self, e):
        while True:
            try:
                value = self.q.get(False)
                print value
            except Empty:
                print 'Nothing to process atm'
                e.set()
            time.sleep(.2)

if __name__ == '__main__':
    e = Event()
    q = Queue()
    c = get_Count(q, e)
    l = read_Count(q, e)
    p1 = Process(target=c.run)
    p1.start()
    p2 = Process(target=l.run)
    p2.start()
    p1.join()
    p2.join()
Edit: fixed typos.
Ignoring your inaccurate stack trace for a moment, consider c = get_Count(q, e). get_Count's initializer, def __init__(self, q):, only takes one parameter after the mandatory self, but you call it with two. I've updated your code to include e in your initializer and done some minor cleanup. It sort of runs, but you will likely need to work out the next issue.
from Queue import Empty
from multiprocessing import Process, Queue, Event
import time

class get_Count(object):
    def __init__(self, q, e):
        self.q = q
        self.e = e

    def run(self):
        i = 0
        run = True
        while run == True:
            print 'putting'
            self.q.put('foo %d' % i)
            time.sleep(.5)
            if self.e.is_set():
                run = False

class read_Count(object):
    def __init__(self, q, e):
        self.q = q
        self.e = e

    def run(self):
        while True:
            try:
                value = self.q.get(False)
                print value
            except Empty:
                print 'Nothing to process atm'
                self.e.set()
            time.sleep(.2)

if __name__ == '__main__':
    e = Event()
    q = Queue()
    c = get_Count(q, e)
    l = read_Count(q, e)
    p1 = Process(target=c.run)
    p1.start()
    p2 = Process(target=l.run)
    p2.start()

How to communicate between processes in real time?

I have two processes, and the data of one process has to be communicated to the other. I wrote a basic queue in order to communicate in real time, but it doesn't serve the purpose.
The following is example code:
from multiprocessing import Process, Pipe, Queue

a, b = Pipe()
q = Queue()

def f(name):
    i = 0
    while i < 4:
        q.put(i)
        i += 1

def t():
    print q.get()

if __name__ == '__main__':
    p = Process(target=f, args=('bob',))
    p.start()
    p.join()
    p1 = Process(target=t, args=(''))
    p1.start()
    p1.join()
The expected output was 0 1 2 3, but I only get 0.
How can I resolve this?
Try this version:
def t():
    while True:
        try:
            print q.get(timeout=1)
        except:
            break
You're only calling get() once; it returns one item at a time.
(As an aside, your function f is very non-Pythonic; try:
def f(name):
    for i in range(4):
        q.put(i)
You're also using q as a global.)
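For illustration, here is a sketch of the same program with the queue passed in explicitly instead of used as a global, plus a None sentinel so the reader knows when the writer is done (written for Python 2 to match the question):
from multiprocessing import Process, Queue

def f(q):
    for i in range(4):
        q.put(i)
    q.put(None)  # sentinel: tell the reader we're done

def t(q):
    while True:
        item = q.get()
        if item is None:
            break
        print item

if __name__ == '__main__':
    q = Queue()
    p = Process(target=f, args=(q,))
    p1 = Process(target=t, args=(q,))
    p.start()
    p1.start()
    p.join()
    p1.join()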
