I want to run code in a process parallel to my main code, but also access its parameters and start/stop that process via the command prompt.
My machine is Win7 64-bit. Something I have in mind is:
from multiprocessing import Process
class dllapi():
...
def apiloop(params, args):
apiclient = dllapi(**args)
while True:
apiclient.cycle()
params = [....]
def mainloop(args):
p = Process(target=apiloop, args=(params, args))
p.start()
while True:
cmd = input()
if cmd == 'kill':
p.terminate()
if cmd == 'stop':
pass # no idea
if cmd == 'resume':
pass # no idea
if cmd == 'report':
print (params)
I wish to keep it simple. I did try to run apiloop as a thread, but input() blocked the program and stopped apiloop from working until I pressed Enter...
To share parameters from the apiloop process I tried a queue and a pipe but, it seemed to me, a queue needs .join() to wait until apiloop is done, and a pipe has a buffer limit.
(Actually I could make apiclient.cycle run every 1 s, but I wish to keep apiclient alive.)
I'd like to know whether it's worth digging deeper into multiprocessing (e.g. I will try Manager as well...) or whether there are other approaches more suitable for my case. Thanks in advance...
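To show what I mean, here is a rough sketch of the queue variant I have in mind (apiclient.params is just a placeholder for whatever values the client exposes), although I'm not sure polling a queue like this is the intended usage:

from multiprocessing import Process, Queue
from queue import Empty

def apiloop(param_queue, args):
    apiclient = dllapi(**args)
    while True:
        apiclient.cycle()
        param_queue.put(apiclient.params)  # placeholder: whatever cycle() updates

def mainloop(args):
    param_queue = Queue()
    p = Process(target=apiloop, args=(param_queue, args))
    p.start()
    latest = None
    while True:
        cmd = input()
        try:
            while True:                    # drain everything queued so far
                latest = param_queue.get_nowait()
        except Empty:
            pass
        if cmd == 'report':
            print(latest)
        elif cmd == 'kill':
            p.terminate()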
*UPDATED: 2018-09-17 09:53*
Some progress with Manager, as below:
from multiprocessing import Process, Manager
class dllapi():
...
class webclientapi():
...
def apiloop(args, cmd, params):
apiclient = dllapi(**args)
status = True
while True:
# command from main
if cmd.value == 'stop':
status = False
elif cmd.value == 'start':
status = True
cmd.value = ''
# stop or run
if status == True:
apiclient.cycle()
# update parameters
params['status'] = status
def uploadloop(cmd, params):
uploadclient = webclientapi()
status = True
while True:
# command from main
if cmd.value == 'stop':
status = False
elif cmd.value == 'start':
status = True
cmd.value = ''
# stop or run
if status == True:
# upload 'status' from apiclient to somewhere
uploadclient.cycle(params['status'])
def mainloop(args):
manager = Manager()
mpcmds = {}
mpparams = {}
mps = {}
mpcmds ['apiloop'] = manager.Value('u', 'start')
mpparams ['apiloop'] = manager.dict()
mps ['apiloop'] = Process(target = apiloop, args=(args, mpcmds['apiloop'], mpparams['apiloop']))
mpcmds ['uploadloop'] = manager.Value('u', 'start')
# mpparams ['uploadloop'] is directly from mpparams ['apiloop']
mps ['uploadloop'] = Process(target = uploadloop, args=(mpcmds['uploadloop'], mpparams['apiloop']))
for key, mp in mps.items():
mp.daemon = True
mp.start()
while True:
cmd = input().split(' ')
# kill daemon process with exit()
if cmd[0] == 'bye':
exit()
# kill individual process
if cmd[0] == 'kill':
mps[cmd[1]].terminate()
# stop individual process via command
if cmd[0] == 'stop':
mpcmds[cmd[1]].value = 'stop'
# start individual process via command
if cmd[0] == 'start':
mpcmds[cmd[1]].value = 'start'
# report individual process info via command
if cmd[0] == 'report':
print (mpparams ['apiloop'])
Hope this helps someone.
I'm showing you how to solve the general problem with threads only, because that is what you tried first and your example doesn't bring up the need for a child-process.
In the example below your dllapi class is named Zoo and subclasses threading.Thread, adding some methods to allow execution control. It takes some data upon initialization, and its cycle method simply iterates repeatedly over this data, counting how many times it has seen each specific item.
import time
import logging
from queue import Queue
from threading import Thread
from itertools import count, cycle
class Zoo(Thread):
_ids = count(1)
def __init__(self, cmd_queue, data, *args,
log_level=logging.DEBUG, **kwargs):
super().__init__()
self.name = f'{self.__class__.__name__.lower()}-{next(self._ids)}'
self.data = data
self.log_level = log_level
self.args = args
self.kwargs = kwargs
self.logger = self._init_logging()
self.cmd_queue = cmd_queue
self.data_size = len(data)
self.actual_item = None
self.iter_cnt = 0
self.cnt = count(1)
self.cyc = cycle(self.data)
def cycle(self):
item = next(self.cyc)
if next(self.cnt) % self.data_size == 0: # new iteration round
self.iter_cnt += 1
self.actual_item = f'{item}_{self.iter_cnt}'
def run(self):
"""
Run is the main-function in the new thread. Here we overwrite run
inherited from threading.Thread.
"""
while True:
if self.cmd_queue.empty():
self.cycle()
time.sleep(1) # optional heartbeat
else:
self._get_cmd()
self.cmd_queue.task_done() # unblocks prompter
def stop(self):
self.logger.info(f'stopping with actual item: {self.actual_item}')
# do clean up
raise SystemExit
def pause(self):
self.logger.info(f'pausing with actual item: {self.actual_item}')
self.cmd_queue.task_done() # unblocks producer joining the queue
self._get_cmd() # just wait blockingly until next command
def resume(self):
self.logger.info(f'resuming with actual item: {self.actual_item}')
def report(self):
self.logger.info(f'reporting with actual item: {self.actual_item}')
print(f'completed {self.iter_cnt} iterations over data')
def _init_logging(self):
fmt = '[%(asctime)s %(levelname)-8s %(threadName)s' \
' %(funcName)s()] --- %(message)s'
logging.basicConfig(format=fmt, level=self.log_level)
return logging.getLogger()
def _get_cmd(self):
cmd = self.cmd_queue.get()
try:
self.__class__.__dict__[cmd](self)
except KeyError:
print(f'Command `{cmd}` is unknown.')
input is a blocking function. You need to outsource it to a separate thread so it doesn't block your main thread. In the example below, input is wrapped in Prompter, a class subclassing threading.Thread. Prompter passes inputs into a command queue, and this command queue is read by Zoo.
class Prompter(Thread):
"""Prompt user for command input.
Runs in a separate thread so the main-thread does not block.
"""
def __init__(self, cmd_queue):
super().__init__()
self.cmd_queue = cmd_queue
def run(self):
while True:
cmd = input('prompt> ')
self.cmd_queue.put(cmd)
self.cmd_queue.join() # blocks until consumer calls task_done()
if __name__ == '__main__':
data = ['ape', 'bear', 'cat', 'dog', 'elephant', 'frog']
cmd_queue = Queue()
prompter = Prompter(cmd_queue=cmd_queue)
prompter.daemon = True
zoo = Zoo(cmd_queue=cmd_queue, data=data)
prompter.start()
zoo.start()
Example session in terminal:
$python control_thread_over_prompt.py
prompt> report
[2018-09-16 17:59:16,856 INFO zoo-1 report()] --- reporting with actual item: dog_0
completed 0 iterations over data
prompt> pause
[2018-09-16 17:59:26,864 INFO zoo-1 pause()] --- pausing with actual item: bear_2
prompt> resume
[2018-09-16 17:59:33,291 INFO zoo-1 resume()] --- resuming with actual item: bear_2
prompt> report
[2018-09-16 17:59:38,296 INFO zoo-1 report()] --- reporting with actual item: ape_3
completed 3 iterations over data
prompt> stop
[2018-09-16 17:59:42,301 INFO zoo-1 stop()] --- stopping with actual item: elephant_3
Related
I use a dedicated Python (3.8) library to control a motor drive via a USB port.
The Python library provided by the motor control drive manufacturers (ODrive) allows a single Python process to control one or more drives.
However, I would like to run 3 processes, each controlling 1 drive.
After researching options (I first considered virtual machines, Docker containers, and multi-threading) I began believing that the easiest way to do so would be to use multiprocessing.
My problem is that I would then need a way to manage (i.e., start, monitor, and stop independently) multiple processes. The practical reason behind this is that the motors are connected to different setups. Each setup must be able to be stopped and restarted separately if it malfunctions, for instance, but other running setups should not be affected by this action.
After reading around the internet and Stack Overflow, I now understand how to create a Pool of processes, how to associate processes with processor cores, how to start a pool of processes, and how to queue/join them (the latter not being needed for me).
What I don't know is how to manage them independently.
How can I separately start/stop different processes without affecting the execution of the others?
Are there libraries to manage them (perhaps even with a GUI)?
I'd probably do something like this:
import random
import time
from multiprocessing import Process, Queue
class MotorProcess:
def __init__(self, name, com_related_params):
self.name = name
# Made up some parameters relating to communication
self._params = com_related_params
self._command_queue = Queue()
self._status_queue = Queue()
self._process = None
def start(self):
if self._process and self._process.is_alive():
return
self._process = Process(target=self.run_processing,
args=(self._command_queue, self._status_queue,
self._params))
self._process.start()
@staticmethod
def run_processing(command_queue, status_queue, params):
while True:
# Check for commands
if not command_queue.empty():
msg = command_queue.get(block=True, timeout=0.05)
if msg == "stop motor":
status_queue.put("Stopping motor")
elif msg == "exit":
return
elif msg.startswith("move"):
status_queue.put("moving motor to blah")
# TODO: msg parsing and move motor
else:
status_queue.put("unknown command")
# Update status
# TODO: query motor status
status_queue.put(f"Motor is {random.randint(0, 100)}")
time.sleep(0.5)
def is_alive(self):
if self._process and self._process.is_alive():
return True
return False
def get_status(self):
if not self.is_alive():
return ["not running"]
# Empty the queue
recent = []
while not self._status_queue.empty():
recent.append(self._status_queue.get(False))
return recent
def stop_process(self):
if not self.is_alive():
return
self._command_queue.put("exit")
# Empty the status queue, otherwise it could potentially stop
# the process from closing.
while not self._status_queue.empty():
self._status_queue.get()
self._process.join()
def send_command(self, command):
self._command_queue.put(command)
if __name__ == "__main__":
processes = [MotorProcess("1", None), MotorProcess("2", None)]
while True:
cmd = input()
if cmd == "start 1":
processes[0].start()
elif cmd == "move 1 to 100":
processes[0].send_command("move to 100")
elif cmd == "exit 1":
processes[0].stop_process()
else:
for n, p in enumerate(processes):
print(f"motor {n + 1}", end="\n\t")
print("\n\t".join(p.get_status()))
Not production ready (e.g. no exception handling, no proper command parsing, etc.) but shows the idea.
Shout if there are any problems :D
You can create multiple multiprocessing.Process instances manually like this:
def my_func(a, b):
pass
p = multiprocessing.Process(target=my_func, args=(100, 200))
p.start()
and manage them using multiprocessing primitives Queue, Event, Condition etc. Please refer to the official documentation for details: https://docs.python.org/3/library/multiprocessing.html
In the following example, multiple processes are started and stopped independently. An Event is used to signal when a process should stop, and a Queue is used to pass results from the child processes back to the main process.
import multiprocessing
import queue
import random
import time
def worker_process(
process_id: int,
results_queue: multiprocessing.Queue,
to_stop: multiprocessing.Event,
):
print(f"Process {process_id} is started")
while not to_stop.is_set():
print(f"Process {process_id} is working")
time.sleep(0.5)
result = random.random()
results_queue.put((process_id, result))
print(f"Process {process_id} exited")
process_pool = []
result_queue = multiprocessing.Queue()
while True:
if random.random() < 0.3:
# starting a new process
process_id = random.randint(0, 10_000)
to_stop = multiprocessing.Event()
p = multiprocessing.Process(
target=worker_process, args=(process_id, result_queue, to_stop)
)
p.start()
process_pool.append((p, to_stop))
if random.random() < 0.2:
# closing a random process
if process_pool:
process, to_stop = process_pool.pop(
random.randint(0, len(process_pool) - 1)
)
to_stop.set()
process.join()
try:
p_id, result = result_queue.get_nowait()
print(f"Completed: process_id={p_id} result={result}")
except queue.Empty:
pass
time.sleep(1)
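One caveat if you run this on Windows: the spawn start method re-imports the module in every child process, so module-level code like the loop above would run again in each child and process creation would fail. Wrapping the driver code in an if __name__ == "__main__": guard avoids that; a minimal illustration:

import multiprocessing

def worker(q: multiprocessing.Queue) -> None:
    q.put("hello from the child")

if __name__ == "__main__":  # required under the spawn start method (Windows default)
    q = multiprocessing.Queue()
    p = multiprocessing.Process(target=worker, args=(q,))
    p.start()
    print(q.get())
    p.join()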
Hello,
I have the code below, which corrects user input, and I want to exit the blocking function keyboard.read_event when control returns from the correction thread.
The whole program works well, but I cannot exit immediately after the corrector thread is finished (the program waits for a key press).
I tried using a custom exception to interrupt the keyboard.read_event function, but I didn't manage to make it work.
import keyboard
import threading
import time
class Interrupt_Custom_Exception(Exception):
"""Base class for other exceptions"""
pass
#########################################################
def delete_and_write(times_to_delete, word_to_write):
print("------------Deleting & Rewrite Started---")
time.sleep(2)
print("------------Deleting & Rewrite Ended---")
# simulate deletion and rewrite
# **here I tried to raise Interrupt_Custom_Exception and catch it in the class code, but it didn't work**
def write_the_suppressed_string(string):
keyboard.write(string)
#########################################################
class keyboard_monitor(threading.Thread):
def __init__(self,thread_name, threadID, word_typed, keyboard_suppress, counter_for_key_pressed):
threading.Thread.__init__(self)
self.name = thread_name
self.threadID = threadID
self.fstring = word_typed
self.counter_for_key_presses = counter_for_key_pressed
self.suppressed = keyboard_suppress
self.temp = ""
def stop(self):
self._is_running = False
def run(self):
if (self.suppressed is False):
while(True):
event = keyboard.read_event(suppress = self.suppressed)
if (event.event_type == keyboard.KEY_DOWN):
if (event.name == "space"):
suppressed_monitor = keyboard_monitor("suppressed_monitor", 2, self.fstring, True, self.counter_for_key_presses)
suppressed_monitor.start()
suppressed_monitor.join()
print("RETURNED TO MAIN MONITOR")
self.counter_for_key_presses = 0
self.fstring = ""
elif (event.name in "abcdefghijklmnopqrstuvwxyz"):
self.fstring = ''.join([self.fstring, event.name])
self.counter_for_key_presses += 1
elif (self.suppressed is True):
def listen_to_keyboard():
event = keyboard.read_event(suppress=self.suppressed)
# **here is where the program waits and doesn't continue when the correction thread is finished**
if (event.event_type == keyboard.KEY_DOWN):
print("---KEYS PRESSED WHILE SUPPRESSED = {}---".format(event.name))
if (event.name in "abcdefghijklmnopqrstuvwxyz"):
self.fstring = ''.join([self.fstring, event.name])
self.counter_for_key_presses += 1
try:
#########################################################
# INITIALY CORRECTING THE WORD PASSED FROM THE NORMAL KEY MONITOR
self.temp = self.fstring
self.fstring = ""
thread_delete_and_rewrite = threading.Thread(
target = delete_and_write, args=(self.counter_for_key_presses, self.temp))
thread_delete_and_rewrite.start()
# raise Interrupt_Custom_Exception
#########################################################
print("-BEFORE WHILE LOOP-")
while(thread_delete_and_rewrite.is_alive() is True): # **this works OK, but if control enters the listen_to_keyboard function it waits there until a key is pressed. I want to somehow stop this manually and continue with the code after this while**
print("--ENTERING THE WHILE LOOP--")
listen_to_keyboard()
print("----EXITING THE WHILE LOOP----\n")
except Interrupt_Custom_Exception:
print("!!!!!!!!!!!!!!!!!CAUGHT IT!!!!!!!!!!!!!!!!!!!")
print("----EXITING THE WHILE LOOP----\n")
print("------BEFORE FINAL WRITE------")
if (self.fstring != ""):
thread_write = threading.Thread(
target = write_the_suppressed_string, args=(self.fstring, ))
thread_write.start()
thread_write.join()
print("SUPPRESSED ENDED")
self._is_running = False
if __name__ == "__main__":
kb_not_suppressed = keyboard_monitor("not_suppressed_monitor", 1, "", False, 0)
kb_not_suppressed.start()
kb_not_suppressed.join()
Any idea on how to exit this blocking function would be very very useful.
Thanks in advance.
It's not possible unless you find some variant of keyboard.read_event that has a timeout, or one that does a non-blocking check for whether there is an event. I haven't found either of those in the keyboard module ;/
A big workaround would be to call keyboard.press yourself when you want to exit; I'm not sure you can detect that the key press didn't come from the user. It's up to you whether that's acceptable.
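Another rough workaround, assuming the keyboard module's hook() callback API: push every event into your own queue.Queue and wait on that with a timeout instead of calling keyboard.read_event directly, so the loop can also check whether the correction thread has finished:

import queue
import threading

import keyboard

events = queue.Queue()
keyboard.hook(events.put)  # every KeyboardEvent gets pushed onto the queue

def listen_while(worker: threading.Thread):
    """Consume key events only while the given thread is still running."""
    while worker.is_alive():
        try:
            event = events.get(timeout=0.1)  # wakes up at least every 0.1 s
        except queue.Empty:
            continue                         # nothing pressed, re-check the thread
        if event.event_type == keyboard.KEY_DOWN:
            print("key pressed while suppressed: {}".format(event.name))

That way nothing blocks indefinitely on a key press, and the function returns as soon as the thread you pass in has ended.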
In my Python script I am triggering a long-running operation (drive()) that is encapsulated in a class method:
car.py
import time
class Car(object):
def __init__(self, sleep_time_in_seconds, miles_to_drive):
self.sleep_time_in_seconds = sleep_time_in_seconds
self.miles_to_drive = miles_to_drive
def drive(self):
for mile in range(self.miles_to_drive):
print('driving mile #{}'.format(mile))
time.sleep(self.sleep_time_in_seconds)
app.py
from car import Car
sleep_time = 2
total_miles = 5
car = Car(sleep_time_in_seconds=sleep_time, miles_to_drive=total_miles)
car.drive()
def print_driven_distance_in_percent(driven_miles):
print("Driven distance: {}%".format(100 * driven_miles / total_miles))
In the main script app.py I'd like to know the progress of the drive() process. One way of solving this would be to create a loop that polls the current progress from the Car class. If the Car class inherited from Thread, polling seems to be the expected pattern, as far as I have googled...
I'm just curious whether it's possible to somehow notify the main script from within the Car class about the current progress.
I thought about maybe creating a wrapper class that I can pass as an argument to the Car class, so the car instance can then call the wrapper class's print_progress function.
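A rough sketch of that wrapper/callback idea (progress_callback is a made-up parameter, not part of the existing Car class):

import time

class Car(object):
    def __init__(self, sleep_time_in_seconds, miles_to_drive, progress_callback=None):
        self.sleep_time_in_seconds = sleep_time_in_seconds
        self.miles_to_drive = miles_to_drive
        self.progress_callback = progress_callback

    def drive(self):
        for mile in range(self.miles_to_drive):
            print('driving mile #{}'.format(mile))
            time.sleep(self.sleep_time_in_seconds)
            if self.progress_callback is not None:
                self.progress_callback(mile + 1)  # notify the caller about progress

total_miles = 5

def print_driven_distance_in_percent(driven_miles):
    print("Driven distance: {}%".format(100 * driven_miles / total_miles))

car = Car(sleep_time_in_seconds=2, miles_to_drive=total_miles,
          progress_callback=print_driven_distance_in_percent)
car.drive()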
Or is there a more pythonic way to notify the caller script on demand?
Thanks
EDIT:
Based on Artiom Kozyrev's answer - this is what I wanted to achieve:
import time
from threading import Thread
from queue import Queue
def ask_queue(q):
"""
The function to control status of our status display thread
q - Queue - need to show status of task
"""
while True:
x = q.get() # take element from Queue
if x == "STOP":
break
print("Process completed in {} percents".format(x))
print("100% finished")
class MyClass:
"""My example class"""
def __init__(self, name, status_queue):
self.name = name
self.status_queue = status_queue
def my_run(self):
"""
The function we would like to monitor
"""
# th = Thread(target=MyClass.ask_queue, args=(self.status_queue,), ) # monitoring thread
# th.start() # start monitoring thread
for i in range(100): # start doing our main function we would like to monitor
print("{} {}".format(self.name, i))
if i % 5 == 0: # every 5 steps show status of progress
self.status_queue.put(i) # send status to Queue
time.sleep(0.1)
self.status_queue.put("STOP") # stop Queue
# th.join()
if __name__ == "__main__":
q = Queue()
th = Thread(target=ask_queue, args=(q,), ) # monitoring thread
th.start() # start monitoring thread
# tests
x = MyClass("Maria", q)
x.my_run()
th.join()
Thanks to all!!
Thanks for the interesting question. Typically you do not need a separate status thread for this case; you could just print the status in the method you would like to monitor. But for training purposes you can solve the issue the following way; please follow the comments and feel free to ask:
import time
from threading import Thread
from queue import Queue
class MyClass:
"""My example class"""
def __init__(self, name, status_queue):
self.name = name
self.status_queue = status_queue
@staticmethod
def ask_queue(q):
"""
The function to control status of our status display thread
q - Queue - need to show status of task
"""
while True:
x = q.get() # take element from Queue
if x == "STOP":
break
print("Process completed in {} percents".format(x))
print("100% finished")
def my_run(self):
"""
The function we would like to monitor
"""
th = Thread(target=MyClass.ask_queue, args=(self.status_queue,), ) # monitoring thread
th.start() # start monitoring thread
for i in range(100): # start doing our main function we would like to monitor
print("{} {}".format(self.name, i))
if i % 5 == 0: # every 5 steps show status of progress
self.status_queue.put(i) # send status to Queue
time.sleep(0.1)
self.status_queue.put("STOP") # stop Queue
th.join()
if __name__ == "__main__":
# tests
x = MyClass("Maria", Queue())
x.my_run()
print("*" * 200)
x.my_run()
I am trying to make 2 processes communicate with each other using the multiprocessing package in Python, more precisely the Queue() class. From the parent process, I want to get an updated value from the child process every 5 seconds. The child process runs a class function. I have made a toy example where everything works fine.
However, when I try to implement this solution in my project, it seems that the Queue.put() call of the child process in the submodule doesn't send anything to the parent process: the parent process doesn't print the desired value and the code never stops running. Actually, the parent process only prints the value sent to the child process (True here) but, as I said, never stops.
So my questions are:
Is there any error in my toy example?
How should I modify my project to get it working just like my toy example?
Toy example: works
main module
from multiprocessing import Process, Event, Lock, Queue, Pipe
import time
import test_mod as test
def loop(output):
stop_event = Event()
q = Queue()
child_process = Process(target=test.child.sub, args=(q,))
child_process.start()
i = 0
print("started at {} ".format(time.time()))
while not stop_event.is_set():
i+=1
time.sleep(5)
q.put(True)
print(q.get())
if i == 5:
child_process.terminate()
stop_event.set()
output.put("main process looped")
if __name__ == '__main__':
stop_event, output = Event(), Queue()
k = 0
while k < 5:
loop_process = Process(target=loop, args=(output,))
loop_process.start()
print(output.get())
loop_process.join()
k+=1
submodule
from multiprocessing import Process, Event, Lock, Queue, Pipe
import time
class child(object):
def __init__(self):
pass
def sub(q):
i = 0
while i < 2000:
latest_value = time.time()
accord = q.get()
if accord == True:
q.put(latest_value)
accord = False
time.sleep(0.0000000005)
i+=1
Project code: doesn't work
main module
import neat #package in which the submodule is
import *some other stuff*
def run(config_file):
config = neat.Config(some configuration)
p = neat.Population(config)
**WHERE MY PROBLEM IS**
stop_event = Event()
q = Queue()
pe = neat.ParallelEvaluator(**args)
child_process = Process(target=p.run, args=(pe.evaluate, q, other args))
child_process.start()
i = 0
while not stop_event.is_set():
q.put(True)
print(q.get())
time.sleep(5)
i += 1
if i == 5:
child_process.terminate()
stop_event.set()
if __name__ == '__main__':
run(config_file)
submodule
class Population(object):
def __init__():
*initialization*
def run(self, q, other args):
while n is None or k < n:
*some stuff*
accord = add_2.get()
if accord == True:
add_2.put(self.best_genome.fitness)
accord = False
return self.best_genome
NB:
I am not used to multiprocessing
I have tried to give the most relevant parts of my project, given that the entire code would be far too long.
I have also considered using Pipe(), however this option didn't work either.
If I see it correctly, your desired submodule is the class Population. However, you start your process with a parameter of type ParallelEvaluator. Also, I can't see that you supply your Queue q to the sub-process. That's what I see from the code provided:
stop_event = Event()
q = Queue()
pe = neat.ParallelEvaluator(**args)
child_process = Process(target=p.run, args=(pe.evaluate, **args))
child_process.start()
Moreover, the following lines create a race condition:
q.put(True)
print(q.get())
The get command is like a pop: it takes an element and deletes it from the queue. If your sub-process doesn't access the queue between these two lines (because it is busy), the True will never make it to the child process. Hence, it is better to use multiple queues, one for each direction. Something like:
stop_event = Event()
q_in = Queue()
q_out = Queue()
pe = neat.ParallelEvaluator(**args)
child_process = Process(target=p.run, args=(pe.evaluate, **args))
child_process.start()
i = 0
while not stop_event.is_set():
q_in.put(True)
print(q_out.get())
time.sleep(5)
i += 1
if i == 5:
child_process.terminate()
stop_event.set()
And this is your submodule:
class Population(object):
def __init__():
*initialization*
def run(self, **args):
while n is None or k < n:
*some stuff*
accord = add_2.get() # add_2 = q_in
if accord == True:
add_3.put(self.best_genome.fitness) #add_3 = q_out
accord = False
return self.best_genome
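If it helps, here is a self-contained sketch of the same two-queue idea with the neat-specific parts stripped out (the child's best value is just simulated with a counter):

from multiprocessing import Process, Queue
import time

def child_loop(q_in, q_out):
    best = 0
    while True:
        best += 1            # stand-in for the real work
        time.sleep(0.01)
        if not q_in.empty() and q_in.get() is True:
            q_out.put(best)  # replies always go out on q_out

if __name__ == '__main__':
    q_in, q_out = Queue(), Queue()
    p = Process(target=child_loop, args=(q_in, q_out))
    p.start()
    for _ in range(5):
        q_in.put(True)       # request an update
        print(q_out.get())   # no race: the parent never reads from q_in
        time.sleep(1)
    p.terminate()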
I'm writing a Python module that reads jstest output and makes an Xbox gamepad work in Python on Linux. In __init__ I need to start an infinite while loop in the background, on another thread. It looks like this:
import os
from threading import Thread
import time
import select
import subprocess
class Joystick:
"""Initializes base class and launches jstest and xboxdrv"""
def __init__(self, refreshRate=2000, deadzone=4000):
self.proc = subprocess.Popen(['xboxdrv', '-D', '-v', '--detach-kernel-driver', '--dpad-as-button'], stdout=subprocess.PIPE, bufsize=0)
self.pipe = self.proc.stdout
self.refresh = refreshRate
self.refreshDelay = 1.0 / refreshRate
self.refreshTime = 0 # indicates the next refresh
self.deadzone = deadzone
self.start()
self.xbox = subprocess.Popen(['jstest', '--normal', '/dev/input/js0'], stdout=subprocess.PIPE, bufsize=-1, universal_newlines=True)
self.response = self.xbox.stdout.readline()
a = Thread(target=self.reload2())
a.start()
print("working")
def reload2(self):
while True:
self.response = self.xbox.stdout.readline()
print("read")
time.sleep(0.5)
def start(self):
global leftVibrateAmount, rightVibrateAmount
leftVibrateAmount = 0
rightVibrateAmount = 0
readTime = time.time() + 1 # here we wait a while
found = False
while readTime > time.time() and not found:
readable, writeable, exception = select.select([self.pipe], [], [], 0)
if readable:
response = self.pipe.readline()
# tries to detect if controller is connected
if response == b'[ERROR] XboxdrvDaemon::run(): fatal exception: DBusSubsystem::request_name(): failed to become primary owner of dbus name\n':
raise IOError("Another instance of xboxdrv is running.")
elif response == b'[INFO] XboxdrvDaemon::connect(): connecting slot to thread\n':
found = True
self.reading = response
elif response == b'':
raise IOError('Are you running as sudo?')
if not found:
self.pipe.close()
# halt if controller not found
raise IOError("Xbox controller/receiver isn't connected")
The loop is started from the __init__ function like so:
a = threading.Thread(target=self.reload2) # code hangs here
a.start()
But each time I create the variable "a", the whole program hangs in the while loop, which should be running on another thread.
Thanks for the help.
You may be having issues with your __init__. I put it in a simple class as an example, and it runs as expected.
import time
from threading import Thread
class InfiniteLooper():
def __init__(self):
a = Thread(target=self.reload2) # reload2, not reload2(): otherwise you're calling reload2 here and assigning its result to target, but it's an infinite loop, so it never returns
print('Added thread')
a.start()
print('Thread started')
def reload2(self):
while True:
self.response = input('Enter something')
print('read')
time.sleep(0.5)
loop = InfiniteLooper()
Output:
Added thread
Thread started
Enter something
1
read
Enter something
1
read
As you can see, the "Enter something" appears after I've added the thread and started it. It also loops fine