Probably a simple question, I'm a beginner.
So, I have a script with a function that returns a DataFrame using threading. Its working fine, but I'm having trouble understanding what's going on when I create the thread. The code:
def test(word,word1):
print(f'start {word}')
df_1 = pd.DataFrame({'test_1':[word],'test_2':[word1]})
time.sleep(2)
print(f'end {word}')
return df_1
my_queue = queue.Queue()
if __name__ == "__main__":
t1_start = perf_counter()
x = threading.Thread(target=lambda q, arg1,arg2: q.put(test(arg1,arg2)), args=(my_queue, 'first','ok1'))
x1 = threading.Thread(target=lambda q, arg1,arg2: q.put(test(arg1,arg2)), args=(my_queue, 'second','ok2'))
x1.start()
x.start()
print('\nrun something')
x.join()
x1.join()
t2_start = perf_counter()
print(f'\n time:{t2_start-t1_start}')
as I said, this script works fine, giving me the following return:
But if I try to remove the lambda function as below:
if __name__ == "__main__":
t1_start = perf_counter()
x = threading.Thread(target=my_queue.put(test('first','ok1')))
x1 = threading.Thread(target=my_queue.put(test('second','ok2')))
x1.start()
x.start()
print('\nrun something')
x.join()
x1.join()
t2_start = perf_counter()
print(f'\n time:{t2_start-t1_start}')
The script will still work, but the threading not. I have the following return:
Why do I need to use a lambda function for threading to work?
Related
I would like to perform the following below using multiprocess, instead of subprocess.Popen. This is because I cannot pass objects using popen. I know my simple example below does not use/pass objects, but that is what I want to do.
Sample code is:
main.py
import subprocess
class ProcReader():
def __init__(self, python_file):
self.proc = subprocess.Popen(['python', python_file], stdout=subprocess.PIPE)
def __iter__(self):
return self
def __next__(self):
while True:
line = self.proc.stdout.readline()
if not line:
raise StopIteration
return line
if __name__ == "__main__":
r1 = ProcReader("test1.py")
r2 = ProcReader("test2.py")
r3 = ProcReader("test3.py")
for l1, l2, l3 in zip(r1, r2, r3):
d1 = l1.decode('utf-8').strip().split(",")
d2 = l2.decode('utf-8').strip().split(",")
d3 = l3.decode('utf-8').strip().split(",")
print(f"{d1[0]}:{d1[1]},{d2[0]}:{d2[1]},{d3[1]}:{d3[1]}")
test#.py
for x in range(10):
print("test1,{}".format(x))
My sample code is in python3, but I would like an equivalent, using multiprocess, in python2.7. Should the equivalent also read from stdout? Or should it utilize the queue and just have a worker reading from the queue?
Update---------
My example using multiprocessing:
import time
from multiprocessing import Process, Queue
def writer1(queue):
for x in range(10):
time.sleep(1)
queue.put("test1,{}".format(x))
def writer2(queue):
for x in range(10):
time.sleep(2)
queue.put("test2,{}".format(x))
def writer3(queue):
for x in range(10):
queue.put("test3,{}".format(x))
if __name__=='__main__':
q1 = Queue()
q2 = Queue()
q3 = Queue()
writer_1 = Process(target=writer1, args=((q1),))
writer_1.daemon = True
writer_1.start()
writer_2 = Process(target=writer2, args=((q2),))
writer_2.daemon = True
writer_2.start()
writer_3 = Process(target=writer3, args=((q3),))
writer_3.daemon = True
writer_3.start()
while True:
msg1 = q1.get()
msg2 = q2.get()
msg3 = q3.get()
if msg1 and msg2 and msg3:
d1 = msg1.strip().split(",")
d2 = msg2.strip().split(",")
d3 = msg3.strip().split(",")
print("{}:{},{}:{},{}:{}".format(d1[0],d1[1],
d2[0],d2[1],
d3[0],d3[1]))
else:
break
Didnt realize q1.get() waits until something is there, I added sleep to verify this. Also, how do I check that the process is done writing? Seems to be just waiting at the end
To adapt your second example for my comment about sentinel objects, maybe you're looking for something like
import os
import time
from multiprocessing import Process, Queue
def writer(queue):
value = os.getpid()
for x in range(10):
time.sleep(0.1)
queue.put("{},{}".format(value, x))
queue.put(None)
def spawn_process():
q = Queue()
p = Process(target=writer, args=(q,))
p.daemon = True
p.start()
return (p, q)
if __name__ == "__main__":
processes_and_queues = [spawn_process() for x in range(3)]
processes, queues = zip(*processes_and_queues)
live_queues = list(queues)
while live_queues:
messages = []
for queue in live_queues:
message = queue.get()
if message is None:
live_queues.remove(queue)
messages.append(message)
if len(messages) == len(processes):
print(messages)
It outputs (e.g.)
['51748,0', '51749,0', '51750,0']
['51748,1', '51749,1', '51750,1']
['51748,2', '51749,2', '51750,2']
['51748,3', '51749,3', '51750,3']
['51748,4', '51749,4', '51750,4']
['51748,5', '51749,5', '51750,5']
['51748,6', '51749,6', '51750,6']
['51748,7', '51749,7', '51750,7']
['51748,8', '51749,8', '51750,8']
['51748,9', '51749,9', '51750,9']
I already managed executing one function periodically with a specific sampling time T with the python scheduler from the sched package:
import sched
import time
def cycle(sche, T, fun, arg):
sche.enter(T, 1, cycle, (sche, T, fun, arg))
fun(arg)
def fun(arg):
print(str(time.time()))
print(arg)
def main():
scheduler = sched.scheduler(time.time, time.sleep)
T = 1
arg = "some argument"
cycle(scheduler, T, fun, arg)
scheduler.run()
What I would like to do is adding another function fun2(), that also will be executed periodically with another sample time T2.
What would be a proper way to do that?
So for me the following solution worked:
as I will have two CPU bound tasks I set up a multiprocessing environment with two processes. Each process starts an own scheduler that runs 'forever' with its own 'sampling' time.
What does anybody with more experience in python than me (I've just started :-D) think about this approach? Will it cause any problems in your opinion?
import time
import multiprocessing
import sched
global schedule1
global schedule2
def fun1(arg):
print("Im the function that is executed every T1")
time.sleep(0.05) # do something for t < T1
def fun2(arg):
print("Im the function that is executed every T2")
time.sleep(0.8) # do something for t < T2
def cycle1(scheduler1, T1, fun, arg):
global schedule1
try:
schedule1.append(scheduler1.enter(T1, 1, cycle1, (scheduler1, T1, fun, arg)))
fun1(arg)
scheduler1.run()
except KeyboardInterrupt:
for event in schedule1:
try:
scheduler1.cancel(event)
except ValueError:
continue
return
def cycle2(scheduler2, T2, fun, arg):
global schedule2
try:
schedule2.append(scheduler2.enter(T2, 1, cycle2, (scheduler2, T2, fun, arg)))
fun2(arg)
scheduler2.run()
except KeyboardInterrupt:
for event in schedule2:
try:
scheduler2.cancel(event)
except ValueError:
continue
return
def main():
global schedule2
global schedule1
schedule2 = []
schedule1 = []
scheduler1 = sched.scheduler(time.time, time.sleep)
scheduler2 = sched.scheduler(time.time, time.sleep)
T1 = 0.1
T2 = 1
list_of_arguments_for_fun1 = []
list_of_arguments_for_fun2 = []
processes = []
# set up first process
process1 = multiprocessing.Process(target=cycle1, args=(scheduler1, T1, fun1, list_of_arguments_for_fun1))
processes.append(process1)
# set up second process
process2 = multiprocessing.Process(target=cycle2, args=(scheduler2, T2, list_of_arguments_for_fun2, list_of_arguments_for_fun2))
processes.append(process2)
process1.start()
process2.start()
for process in processes:
process.join()
# anything below here in the main() won't be executed
if __name__ == "__main__":
try:
start = time.perf_counter()
main()
except KeyboardInterrupt:
print('\nCancelled by User. Bye!')
finish = time.perf_counter()
print(f'Finished in {round(finish - start, 2)} second(s)')
I'm new to python but i'm working on a project in which i need to connect 3 or more Ultrasonic Sensors concurrently. I read all about threads and multiprocessing ran a couple of examples successfully. I know the code has to be run form the command prompt or the PI2 terminal. However, the multiprocessing code I wrote does not work and I cannot figure out why. Could someone please help me.
from multiprocessing import Process
from gpiozero import DistanceSensor
ultrasonic = DistanceSensor(echo=18, trigger=23)
ultrasonic_B = DistanceSensor(echo=25, trigger=24)
ultrasonic_C = DistanceSensor(echo=12, trigger=16)
ultrasonic.max_distance = 1
ultrasonic_B.max_distance = 1
ultrasonic_C.max_distance = 1
def A(name):
while True:
ultrasonic.wait_for_in_range()
print('Distance')
if ultrasonic.wait_for_out_of_range():
print('Out of range')
def B(name):
while True:
ultrasonic_B.wait_for_in_range()
print('Distance_B')
if ultrasonic_B.wait_for_out_in_range():
print('Out of range_B')
def C(name):
while True:
ultrasonic_C.wait_for_in_range()
print('Distance_B')
if ultrasonic_C.wait_for_out_in_range():
print('Out of range_B')
if __name__ == "__main__":
p = Process(target=A)
p1 = Process(target=B)
p2 = Process(target=C)
p.start()
p1.start()
p2.start()
I took your suggestions into consideration and was able to make the first loop work but the other 2 loops give me nothing. Here is the updated code
from multiprocessing import Process
from gpiozero import DistanceSensor
ultrasonic = DistanceSensor(echo=18, trigger=23)
ultrasonic_B = DistanceSensor(echo=25, trigger=24)
ultrasonic_C = DistanceSensor(echo=12, trigger=16)
ultrasonic.max_distance = 1
ultrasonic_B.max_distance = 1
ultrasonic_C.max_distance = 1
def A():
while ultrasonic.wait_for_in_range():
print('Distance')
if ultrasonic.wait_for_out_of_range():
print('Out of range')
def B():
while ultrasonic_B.wait_for_in_range():
print('Distance_B')
if ultrasonic_B.wait_for_out_in_range():
print('Out of range_B')
def C():
while ultrasonic_C.wait_for_in_range():
print('Distance_B')
if ultrasonic_C.wait_for_out_in_range():
print('Out of range_B')
if __name__ == "__main__":
p = Process(target=A)
p1 = Process(target=B)
p2 = Process(target=C)
p.run()
p1.run()
p2.run()
Result comes form first loop
Distance
Out of range
Distance
Out of range
OK this is what I have
from multiprocessing import Process
from gpiozero import DistanceSensor
ultrasonic = DistanceSensor(echo=18, trigger=23)
ultrasonic_B = DistanceSensor(echo=25, trigger=24)
ultrasonic_C = DistanceSensor(echo=12, trigger=16)
ultrasonic.max_distance = 1
ultrasonic_B.max_distance = 1
ultrasonic_C.max_distance = 1
def A():
while ultrasonic.wait_for_in_range():
print('Distance')
if ultrasonic.wait_for_out_of_range():
print('Out of range')
def B():
while ultrasonic_B.wait_for_in_range():
print('Distance_B')
if ultrasonic_B.wait_for_out_in_range():
print('Out of range_B')
def C():
while ultrasonic_C.wait_for_in_range():
print('Distance_B')
if ultrasonic_C.wait_for_out_in_range():
print('Out of range_B')
if __name__ == "__main__":
p = Process(target=A)
p1 = Process(target=B)
p2 = Process(target=C)
p.start()
p1.start()
p2.start()
You do not say what you mean with "does not work", so I am taking a few guesses here.
The obvious fail here would be:
TypeError: A() takes exactly 1 argument (0 given)
Since functions A, B and C all take an argument name, and you do not provide it in Process(target=A). It works if you just remove the parameter from the functions, since you are not even using it.
You can also provide the argument in the call like this:
p = Process(target=A, args=('ultra_a',))
Other one could be indentation error, at least in your code paste you have one extra space at each line until def B.
import multiprocess
from gpiozero import DistanceSensor
def A():
ultrasonic = DistanceSensor(echo=18, trigger=23)
ultrasonic.max_distance = 1
while ultrasonic.wait_for_in_range():
print('Sensor_1' ultrasonic.distance, 'm')
if ultrasonic.wait_for_out_of_range():
print('0')
def B():
ultrasonic_B = DistanceSensor(echo=25, trigger=24)
ultrasonic_B.max_distance = 1
while ultrasonic_B.wait_for_in_range():
print('Sensor_2', ultrasonic_B.distance, 'm')
if ultrasonic_B.wait_for_out_in_range():
print('0')
def C():
ultrasonic_C = DistanceSensor(echo=12, trigger=16)
ultrasonic_C.max_distance = 1
while ultrasonic_C.wait_for_in_range():
print('Sensor_3',ultrasonic_C.distance, 'm')
if ultrasonic_C.wait_for_out_in_range():
print('0')
if __name__ == "__main__":
p = multiprocess.Process(target=A)
p1 = multiprocess.Process(target=B)
p2 = multiprocess.Process(target=C)
p.start()
p1.start()
p2.start()
Trying to understand the following results when using multithreading with python. The following code prints A and B to the console in random sequence, which is what I would like to achieve. But the second piece of code only prints "A" to the console, and never proceeds past t1.start(). Why is this? What do I need to do the the second section of code to make it behave like the first?
Thanks in advance, this is my first post.
This is the behavior I want :
from threading import Thread
def runA():
while True:
print ('A\n')
def runB():
while True:
print ('B\n')
if __name__ == "__main__":
t1 = Thread(target = runA())
t2 = Thread(target = runB())
t1.setDaemon(True)
t2.setDaemon(True)
t1.start()
t2.start()
while True:
pass
I want the behavior produced from the above code but using classes like in the example below. The code below never executes t2.start(). Why is this?
from threading import Thread
class test():
def runA(self):
while True:
print ('A\n')
def runB(self):
while True:
print ('B\n')
if __name__ == "__main__":
testingNow=test()
t1 = Thread(target = testingNow.runA())
t2 = Thread(target = testingNow.runB())
t1.setDaemon(True)
t2.setDaemon(True)
t1.start()
t2.start()
while True:
pass
Get rid of the () in testingNow.runA() and testingNow.runB().
I have the following block of code that is part of a larger program. I am trying to get it to print the execution time once all of the threads are closed but can't seem to get it to work. Any ideas?
import time
import csv
import threading
import urllib.request
def openSP500file():
SP500 = reader(open(r'C:\Users\test\Desktop\SP500.csv', 'r'), delimiter=',')
for x in SP500:
indStk = x[0]
t1 = StockData(indStk)
t1.start()
if not t1.isAlive():
print(time.clock()-start_time, 'seconds')
else:
pass
def main():
openSP500file()
if __name__ == '__main__':
start_time = time.clock()
main()
Thanks!
You aren't waiting for all the threads to finish (only the last one created). Perhaps something like this in your thread-spawning loop?
threads = []
for x in SP500:
t1 = StockData(x[0])
t1.start()
threads.append(t1)
for t in threads:
t.join()
... print running time