I have two classes in a Python script. One of them is Main() and the second is Loading().
class Main:
    # Placeholder: in the real script this builds and returns the filled dictionary.
    pass
class Loading:
    # Placeholder: in the real script this iterates all images and downloads them.
    pass
First, Main() runs and returns a filled dictionary.
Then is created instance of Loading() that iterates all images and downloads them:
## LOAD IMAGES ##
# Blocking section: every image is downloaded before save() returns, which is
# what stalls the request handler (see the 502 error described below).
imageLoader = Loading()
imageLoader.save()
So, the problem is that when I call this script it creates one main thread that waits for imageLoader = Loading() to finish.
As a result, the main thread runs for so long that it triggers a 502 Server Error.
How can I run imageLoader = Loading() in a separate background thread so that the main thread is released?
What will be launched first in this code:
# Module-level containers: results and errors collected by ImageLoader.
LOADED_IMAGES = {}
IMAGES_ERRORS = []
IMAGES = {"A": "https://images.aif.ru/009/299/3378e1a1ab2d1c6e6be6d38253dd3632.jpg", "B": "http://static1.repo.aif.ru/1/77/623957/b99ee5f894f38261e4d3778350ffbaae.jpg"}
excel = Excel()
excel.readExcel(file_path, 'Main')
imageLoader = ImageLoader()
# NOTE(review): this line CALLS run() immediately on the current thread
# (then calls .save() on run()'s None result, which would raise
# AttributeError).  Thread() receives the *result* of that expression, not a
# callable, so nothing runs in the background.  Pass a function reference
# instead, e.g. Thread(target=runOnThread).
Thread(target=imageLoader.run().save()).start()
Does it work line by line or Thread will be created immediately?
**This is full code:**
class ImageLoader:
    """Downloads every image referenced in IMAGES and records the results.

    run() fills the module-level LOADED_IMAGES dict (article -> stored
    relative path); save() then writes those paths onto the matching
    Products rows.  Download errors are collected in IMAGES_ERRORS instead
    of aborting the whole batch.
    """

    def run(self):
        """Download all images listed in IMAGES, skipping empty entries."""
        for article, image in IMAGES.items():
            if not image:  # covers both None and ''
                continue
            LOADED_IMAGES[article] = self.loadImage(
                self.replaceHttpsProtocol(image),
                '/home/o/oliwin4/jara/public_html/image/catalog/s/')

    def replaceHttpsProtocol(self, url):
        """Force plain http.

        Fix: replace only the first (scheme) occurrence, not every
        "https:" substring that might appear later in the URL.
        """
        return url.replace("https:", "http:", 1)

    def nameNameGenerate(self):
        """Millisecond timestamp used as a (mostly) unique file name."""
        return int(round(time.time() * 1000))

    def extention(self, path):
        """Return the file extension of *path* including the leading dot.

        Falls back to ".jpg" when the final path segment has no dot.
        Fixes two defects in the original: a dotless path returned
        '.' + the whole path (dots in the host name also leaked in), and
        the fallback was a bare "jpg" without the dot.
        """
        name = path.rsplit("/", 1)[-1]  # look at the last segment only
        if "." in name:
            return "." + name.rsplit(".", 1)[-1]
        return ".jpg"

    def save(self):
        """Persist every downloaded image path onto its product row."""
        for article, image in LOADED_IMAGES.items():
            self.add(article, image)

    def add(self, article, image):
        """Update the Products row whose SKU matches *article*."""
        Products.update(image=image).where(Products.sku == article).execute()

    def loadImage(self, path, path_folder):
        """Download *path* into *path_folder*; return the stored relative path.

        On failure the error text is appended to IMAGES_ERRORS and None is
        returned, as before.
        """
        try:
            filename = str(self.nameNameGenerate()) + self.extention(path)
            wget.download(url=path, out=path_folder + filename)
            return 'catalog/s/' + filename
        except Exception as e:  # was BaseException: don't swallow KeyboardInterrupt/SystemExit
            IMAGES_ERRORS.append(str(e))
Using:
def runOnThread():
    """Download all images and persist their paths; meant for a worker thread."""
    loader = ImageLoader()
    loader.run()
    loader.save()


if __name__ == "__main__":
    # daemon=True: the worker must not keep the interpreter alive on shutdown.
    Thread(target=runOnThread, daemon=True).start()
You need to find which line is blocking your code in order to run it in a separate thread; usually the blocking line is some kind of I/O or an expensive computation.
To do it you can use the threading module.
So, assuming that your blocking line is the
imageLoader.save()
Try to run it in a separate thread with this code.
from threading import Thread
Thread(target=imageLoader.save()).start()
As mentioned, you can use Python's threading module for this. Though, a thread takes a reference to a function (passing target a function call is useless / wrong).
In your case, if you want to both instantiate then run a function on an object in a separate thread, you should put these two in a function:
def runOnThread():
imageLoader = Loading()
imageLoader.save()
Then pass a reference of this function to a new thread, like so (notice no ()):
from threading import Thread
Thread(target=runOnThread).start()
If you don't want the main thread to wait for the new thread to finish, you could make it a Daemon thread, like so:
Thread(target=runOnThread, daemon=True).start()
Shorter version of all the above:
from threading import Thread
Thread(target=lambda: Loading().save(), daemon=True).start()
Related
I created a small Flask app to download images and text from pages; this can take a very long time, so
I would like to execute my requests in parallel. I create threaded tasks. I would like these tasks to be able to download text or images from sites. I keep my tasks in a list of workers.
However, I would like to select which method the thread will execute and then start the thread.
How can I pass my method to the thread's run() method? Will this be a sub-daemon thread?
import threading
import time
workers = []
class SavePage:
    """Stub page saver used by the threading examples."""

    def get_text(self):
        # Placeholder for real text scraping.
        print("Getting text")

    def get_images(self):
        # Placeholder for real image scraping.
        print("Getting images")
class Task(threading.Thread):
    """Thread subclass that owns a SavePage helper.

    NOTE(review): run() is not overridden here, so start() would execute
    Thread's default (empty) run.  Calling get_text_from_page() /
    get_images_from_page() directly runs them synchronously in the caller's
    thread -- no new thread is involved.
    """

    def __init__(self):
        super().__init__()
        self.save_page = SavePage()

    def get_text_from_page(self):
        self.save_page.get_text()

    def get_images_from_page(self):
        self.save_page.get_images()
if __name__ == '__main__':
    task = Task()
    # Answer to the inline question: this is a plain method call, so it runs
    # synchronously in the main thread.  Only start() spawns a thread and
    # invokes run(); without it nothing here is threaded.
    task.get_images_from_page() # Why this executes, when I didn't put task.start() ?
    # Moreover, is this really threaded? or just uses a method from class Task?
    workers.append(task) # I want this list to be empty, after job is finished
    print("".join(str(worker.is_alive()) for worker in workers)) #
    print(workers)
task.get_images_from_page() # Why this executes, when I didn't put task.start() ?
# Moreover, is this really threaded? or just uses a method from class Task?
It's not threaded. It's just a normal method call in the main thread.
Thread.start is the method that will start Thread.run function inside another thread.
You could set some state in __init__ to choose which function to execute:
class Task(threading.Thread):
    """Worker thread; *action* ("text" or "images") selects what run() does."""

    def __init__(self, action):
        super().__init__()
        self.save_page = SavePage()
        self.action = action

    def get_text_from_page(self):
        self.save_page.get_text()

    def get_images_from_page(self):
        self.save_page.get_images()

    def run(self):
        # Dispatch table in place of the if/elif chain; an unknown action
        # falls through and does nothing, exactly as before.
        handlers = {
            "text": self.get_text_from_page,
            "images": self.get_images_from_page,
        }
        handler = handlers.get(self.action)
        if handler is not None:
            handler()
Keep in mind that threads can be run in simpler way by passing target function:
# Simpler alternative: give Thread a target function instead of subclassing.
def target_func():
    save_page = SavePage()
    save_page.get_images()

t = threading.Thread(target=target_func)
t.start()
# or in this simple case:
save_page = SavePage()
# Note: no parentheses -- a bound-method *reference* is passed, not its result.
t = threading.Thread(target=save_page.get_images)
t.start()
Currently creating separate instances of my class, Example, then creating a thread for each instance and utilizing the Class's execute_thread function as the thread function target. The thread function continues running as long as the member variable exit_signal is not updated to True. Once control, shift, and 2 are pressed on the keyboard, the member variable isn't updated from within the thread instance.
The problem is that the thread function isn't recognizing any change to the member variable. Why isn't it detecting the change — is the while loop preventing it from doing so?
import keyboard
import multiprocessing
import time
class Example:
    # Class-level default for the exit flag, shared by all instances.
    m_exit_signal = False

    def __init__(self):
        # keyboard is a third-party hook library; the callback fires in the
        # process where add_hotkey() was registered (the parent).
        keyboard.add_hotkey('control, shift, 2', lambda: self.exit_signaled())

    def execute_example_thread(self):
        exit_status = self.m_exit_signal
        # THREAD continues till exit is called! -
        # NOTE(review): this loop runs in a *child process* (multiprocessing),
        # which received a copy of the instance at fork time.  exit_signaled()
        # mutates the copy living in the parent, so the change is never seen
        # here -- a shared multiprocessing.Value is needed (see the answer).
        while exit_status == False:
            time.sleep(5)
            exit_status = self.m_exit_signal
            print(exit_status)

    def exit_signaled(self):
        # Sets an *instance* attribute on the parent-process object only.
        self.m_exit_signal = True
        print("Status {0}".format(self.m_exit_signal))
example_objects = []
example_objects.append(Example())
example_objects.append(Example())
example_threads = []
# Despite the names, these are separate *processes*, not threads.
for value in example_objects:
    example_threads.append(multiprocessing.Process(target=value.execute_example_thread, args=()))
    example_threads[-1].start()
Multiprocessing forks your code so that it runs in a separate process. In the code above the keyboard callback is calling the method in the instances present in the parent process. The loop (and a copy of the class instance) is actually running in a forked version in a child process. In order to signal the child, you need to share a variable between them and use it to pass data back and forth. Try the code below.
import keyboard
import multiprocessing as mp
import time
class Example(object):
    """Worker whose loop is stopped via a hotkey through a shared flag."""

    def __init__(self, hot_key):
        # 'I' = unsigned int.  mp.Value lives in shared memory, so the parent
        # (where the hotkey callback runs) and the child (where execute()
        # loops) both see updates.
        self.run = mp.Value('I', 1)
        keyboard.add_hotkey('control, shift, %d' % hot_key, self.exit_signaled)
        print("Initialized {}".format(mp.current_process().name))

    def execute(self):
        # Runs in the child process; exits once the shared flag is cleared.
        while self.run.value:
            time.sleep(1)
            print("Running {}".format(mp.current_process().name))
        print("{} stopping".format(mp.current_process().name))

    def exit_signaled(self):
        # Hotkey callback, fired in the parent; the shared Value propagates
        # the change to the child.
        print("exit signaled from {}".format(mp.current_process().name))
        self.run.value = 0
# Each Example is constructed in the parent; only execute() runs in the child.
p1 = mp.Process(target=Example(1).execute)
p1.start()
time.sleep(0.1)  # brief pause between launches (presumably to keep output readable)
p2 = mp.Process(target=Example(2).execute)
p2.start()
Here the parent and the child of each instance share a self.run = mp.Value. To share data between processes, you need to use one of these shared types, not just any Python variable.
I started learning python recently, and I am facing a situation that I do not even know if it is expected, or if something is wrong.
I am learning parallel threading to have two independent processes on the same program (UI control on one thread, image processing on another)
So, to test this I created this simple code:
(Camera is a custom class that connects to a usb webcam)
import thread
from vii.camera import Camera
class Process(object):
    """Grabs frames from a Camera in (what is meant to be) a background thread."""

    def __init__(self, width=800, height=600):
        self._cam = Camera(width, height)
        self._is_running = False
        self._current_image = None

    def start(self):
        # BUG: self._run() is *called right here*, on the current thread, and
        # its (None) result is what gets handed to thread.start_new -- so the
        # capture loop blocks the main thread and the code after start()
        # never runs.  It should be: thread.start_new(self._run, ())
        # (note also that (self) is just self, not the 1-tuple (self,)).
        thread.start_new(self._run(), (self))

    def _run(self):
        # Capture loop; runs until stop() clears the flag.
        self._cam.start()
        self._is_running = True
        while self._is_running:
            self._current_image = self._cam.update()
            self._current_image.show()

    def get_image(self):
        # Intended to be read from the main thread.
        return self._current_image

    def stop(self):
        self._is_running = False
        self._cam.close()
        thread.exit()
process = Process()
process.start()  # never returns -- see the bug noted in Process.start()
print("You will never see this output")
while (True):
    key = raw_input()  # Python 2 input call
    if key == 'q':
        process.stop()
        break
The thread is created with success, and I am able to see the image. Now, I need to be able to affect it (stop it, get data from it) from the main thread. But the problem is that the code never enters in the while loop.
Is this behaviour expected? If it is, is there a way for me to achieve the functionality I need?
I've spent the past few days reading various threads about making tkinter thread-safe and running children without blocking the main thread. I thought I had arrived at a solution that allowed my code to run as I wanted it to, but now my main thread becomes non-responsive when my child process finishes. I can move the window around but the GUI part shows a loading cursor, whites out, and says "Not Responding" in the title of the window. I can let it sit like that forever and nothing will happen. I know what part of the code is causing the problem but I am not sure why it's causing the GUI to freeze. I'm using Windows.
I want my GUI to run another process using multiprocess. I have sys.stdout and sys.stderr routed to a queue and I use threading to create a thread that holds an automatic queue checker that updates the GUI every 100 ms so my GUI updates in "real time". I have tried every way of sending the child's stdout/stderr to the GUI and this is the only way that works the way I want it to (except for the freezing bit), so I would like to find out why it's freezing. Or I would like help setting up a proper way of sending the child's output to the GUI. I have tried every method I could find and I could not get them to work.
My main thread:
#### _______________IMPORT MODULES_________________###
import Tkinter
import multiprocessing
import sys
from threading import Thread
import qBMPchugger
###____________Widgets__________________###
class InputBox(Tkinter.Tk):
    """Main window: a text log plus an OK button that launches the worker."""

    def __init__(self, parent):
        Tkinter.Tk.__init__(self, parent)
        self.parent = parent
        self.initialize()

    def initialize(self):
        """Build the widgets: OK button, output Text box and its scrollbar."""
        # Styles
        self.grid()
        # Approval
        self.OKbutton = Tkinter.Button(self, text=u"OK", command=self.OKgo, anchor="e")
        self.OKbutton.pack(side="right")
        self.view = Tkinter.Text(self)
        self.view.pack(side="left")
        self.scroll = Tkinter.Scrollbar(self, orient=Tkinter.VERTICAL)
        self.scroll.config(command=self.view.yview)
        self.view.config(yscrollcommand=self.scroll.set)
        self.scroll.pack(side="left")

    def write(self, text):
        # File-like hook: lets this window stand in for sys.stdout/sys.stderr.
        self.view.insert("end", text)

    def OKgo(self):
        """OK handler: redirect output here, start the queue poller and the child."""
        sys.stdout = self
        sys.stderr = self
        checker = Thread(target=self._update)
        checker.daemon = True
        checker.start()
        self.view.delete(1.0, "end")
        self.update_idletasks()
        print("Loading user-specified inputs...")
        path = "C:/"
        inarg = (q, path)
        print("Creating the program environment and importing modules...")
        # Starts the text monitor to read output from the child process, BMPchugger
        p = multiprocessing.Process(target=qBMPchugger.BMPcode, args=inarg)
        p.daemon = 1
        p.start()

    def _update(self):
        # NOTE(review): q.get() blocks; once the child exits and the queue
        # stays empty this call never returns, which is what hangs the GUI.
        # The non-blocking rewrite later in this document polls q.empty()
        # before reading.
        msg = q.get()
        self.write(msg)
        self.update_idletasks()
        self.after(100, self._update)
if __name__ == "__main__":
app = InputBox(None)
app.title("File Inputs and Program Settings")
q = multiprocessing.Queue()
app.mainloop()
My child process (qBMPchugger):
#### _______________INITIALIZE_________________###
import os
import sys
import tkMessageBox
import Tkinter
class BadInput(Exception):
    """Raised when a user-supplied file path does not exist."""
    pass
def BMPcode(q, path):
    """Child-process entry point: validates *path*, reporting through queue *q*.

    Replaces sys.stdout/sys.stderr with queue-backed writers so every print
    made in this process is shipped back to the GUI process.
    """
    # Create root for message boxes
    boxRoot = Tkinter.Tk()
    boxRoot.withdraw()

    # Send outputs to the queue
    class output:
        # stdout replacement: every write() is pushed onto the shared queue.
        def __init__(self, name, queue):
            self.name = name
            self.queue = queue

        def write(self, msg):
            self.queue.put(msg)

        def flush(self):
            sys.__stdout__.flush()

    class error:
        # stderr replacement, same mechanism.
        def __init__(self, name, queue):
            self.name = name
            self.queue = queue

        def write(self, msg):
            self.queue.put(msg)

        def flush(self):
            sys.__stderr__.flush()

    sys.stdout = output(sys.stdout, q)
    sys.stderr = error(sys.stderr, q)
    print("Checking out the Spatial Analyst extension from GIS...")
    # Check out extension and overwrite outputs

    ### _________________VERIFY INPUTS________________###
    print("Checking validity of specified inputs...")
    # Check that the provided file paths are valid
    # NOTE(review): *path* is a single string here, so this loop iterates its
    # characters rather than a list of paths -- presumably a list/tuple of
    # paths was intended; verify against the caller.
    inputs = path
    for i in inputs:
        if os.path.exists(i):
            pass
        else:
            message = "\nInvalid file path: {}\nCorrect the path name and try again.\n"
            tkMessageBox.showerror("Invalid Path", message.format(i))
            print message.format(i)  # Python 2 print statement
            raise BadInput
    print("Success!")
It's the part under # Send outputs to the queue (starting with the output class and ending with sys.stderr = error(sys.stderr, q)) that is causing my program to freeze. Why is that holding up my main thread when the child process finishes executing? EDIT: I think the freezing is being caused by the queue remaining open when the child process closes... or something. It's not the particular snippet of code like I thought it was. It happens even when I change the print statements to q.put("text") in either the parent or the child.
What is a better way to send the output to the queue? If you link me to a topic that answers my question, PLEASE show me how to implement it within my code. I have not been successful with anything I've found so far and chances are that I've already tried that particular solution and failed.
Use a manager list or dictionary to communicate between processes https://docs.python.org/2/library/multiprocessing.html#sharing-state-between-processes . You can have a process update the dictionary and send it to the GUI/some code outside the processes, and vice versa. The following is a simple, and a little sloppy, example of doing it both ways.
import time
from multiprocessing import Process, Manager
def test_f(test_d):
    """First process to run; exits when the dictionary's 'QUIT' flag is True."""
    # Mutations of the managed dict are visible to every process holding it.
    test_d['2'] = 2 ## add as a test
    while not test_d["QUIT"]:
        print "P1 test_f", test_d["QUIT"]
        test_d["ctr"] += 1
        time.sleep(1.0)
def test_f2(test_d):
    """Second process: runs a fixed 10-iteration loop, then exits on its own."""
    for j in range(0, 10):
        ## print to show that changes made anywhere
        ## to the dictionary are seen by this process
        print " P2", j, test_d
        time.sleep(0.5)
    print "second process finished"
if __name__ == '__main__':
    ##--- create a dictionary via Manager
    # Manager-backed dict: access goes through a proxy, so all processes see
    # the same data.
    manager = Manager()
    test_d = manager.dict()
    test_d["ctr"] = 0
    test_d["QUIT"] = False
    ##--- start first process and send dictionary
    p = Process(target=test_f, args=(test_d,))
    p.start()
    ##--- start second process
    p2 = Process(target=test_f2, args=(test_d,))
    p2.start()
    ##--- sleep 2 seconds and then change dictionary
    ## to exit first process
    time.sleep(2.0)
    print "\nterminate first process"
    test_d["QUIT"] = True
    print "test_d changed"
    print "dictionary updated by processes", test_d
    ##--- may not be necessary, but I always terminate to be sure
    time.sleep(5.0)
    p.terminate()
    p2.terminate()
For my particular problem, the main thread was trying to read from the queue when the queue was empty and not having anything else put into it. I don't know the exact details as to why the main loop got hung up on that thread (self._update in my code) but changing _update to the following stopped making the GUI non-responsive when the child finished:
def _update(self):
    """Poll the queue without blocking; forward one message to the GUI if any."""
    if not q.empty():
        self.write(q.get())
        self.update_idletasks()
I have yet another question about Python multiprocessing.
I have a module that creates a Process and just runs in a while True loop.
This module is meant to be enabled/disabled from another Python module.
That other module will import the first one once and is also run as a process.
How would I better implement this?
so for a reference:
#foo.py
def foo():
    while True:
        # NOTE(review): 'enabled' is a plain module global; a child process
        # gets its own copy at start, so toggling it from another module or
        # process has no effect here.  Use multiprocessing.Value or a Queue
        # instead (see the answer below).
        if enabled:
            #do something

p = Process(target=foo)
p.start()
and imagine second module to be something like that:
#bar.py
import foo, time

def bar():
    # Toggles foo's worker every 10 seconds via the (to-be-implemented)
    # enable()/disable() API.
    while True:
        foo.enable()
        time.sleep(10)
        foo.disable()

Process(target=bar).start()
Constantly running a process checking for condition inside a loop seems like a waste, but I would gladly accept the solution that just lets me set the enabled value from outside.
Ideally I would prefer to be able to terminate and restart the process, again from outside of this module.
From my understanding, I would use a Queue to pass commands to the Process. If it is indeed just that, can someone show me how to set it up in a way that I can add something to the queue from a different module.
Can this even be easily done with Python or is it time to abandon hope and switch to something like C or Java
I proposed in the comments two different approaches:
using a shared variable from multiprocessing.Value
pause / resume the process with signals
Control by sharing a variable
def target_process_1(run_statement):
    # Worker: does its job only while the shared flag is truthy.
    while True:
        if run_statement.value:
            print "I'm running !"
            time.sleep(1)

def target_process_2(run_statement):
    # Controller: demonstrates pausing and resuming the worker via the flag.
    time.sleep(3)
    print "Stoping"
    run_statement.value = False
    time.sleep(3)
    print "Resuming"
    run_statement.value = True

if __name__ == "__main__":
    # "i" = C int; the Value lives in shared memory, visible to both children.
    run_statement = Value("i", 1)
    process_1 = Process(target=target_process_1, args=(run_statement,))
    process_2 = Process(target=target_process_2, args=(run_statement,))
    process_1.start()
    process_2.start()
    time.sleep(8)
    process_1.terminate()
    process_2.terminate()
Control by sending a signal
from multiprocessing import Process
import time
import os, signal
def target_process_1():
    # Worker that just loops; it is suspended/resumed externally via signals.
    while True:
        print "Running !"
        time.sleep(1)

def target_process_2(target_pid):
    # Controller: SIGSTOP freezes the worker, SIGCONT resumes it.
    # NOTE(review): SIGSTOP/SIGCONT are POSIX signals -- this approach will
    # not work on Windows.
    time.sleep(3)
    os.kill(target_pid, signal.SIGSTOP)
    time.sleep(3)
    os.kill(target_pid, signal.SIGCONT)

if __name__ == "__main__":
    process_1 = Process(target=target_process_1)
    process_1.start()
    # The controller needs the worker's pid, available only after start().
    process_2 = Process(target=target_process_2, args=(process_1.pid,))
    process_2.start()
    time.sleep(8)
    process_1.terminate()
    process_2.terminate()
Side note: if possible do not run a while True.
EDIT: if you want to manage your process in two different files, supposing you want to use a control by sharing a variable, this is a way to do.
# file foo.py
from multiprocessing import Value, Process
import time

# Public API actually provided by this module.  (The original advertised
# 'pause'/'resume', which were never defined -- 'enable'/'disable' are the
# functions that exist.)
__all__ = ['start', 'stop', 'enable', 'disable']

# Module state: the shared run flag and the worker process handle.
_statement = None
_process = None


def _target(run_statement):
    """ Target of the foo's process """
    while True:
        if run_statement.value:
            print("I'm running !")
            time.sleep(1)


def start():
    """Spawn the worker process with a fresh shared run flag."""
    global _process, _statement
    _statement = Value("i", 1)
    _process = Process(target=_target, args=(_statement,))
    _process.start()


def stop():
    """Terminate the worker and clear the module state."""
    global _process, _statement
    _process.terminate()
    # Fix: the original `_statement, _process = None, _process` left the
    # dead process handle in place; both must be reset.
    _statement, _process = None, None


def enable():
    """Let the worker loop do its work."""
    _statement.value = True


def disable():
    """Pause the worker loop (the process keeps running, work is skipped)."""
    _statement.value = False