multiprocessing: a progress bar for each task - python

I have a class that implements an iterative algorithm. Since the execution takes a while and I need to average the results over many executions, I decided to use multiprocessing. The thing is that I would like to have a progress bar (or something less fancy) for every execution. Something like:
experiment 1 [##########] 60%
experiment 2 [#############] 70%
experiment 3 [###] 20%
My class looks like this (note that I already use a progress bar and that I would like to keep it there so that it continues to work when I do not parallelize):
from __future__ import division
from time import sleep

class Algo():
    def __init__(self, total_iters, arg1, arg2, name):
        self.total_iters = total_iters
        self.arg1 = arg1
        self.arg2 = arg2
        self.name = name

    def step(self, iteration):
        """
        One iteration of Algorithm
        """
        # Progress bar
        completed = 100*iteration/self.total_iters
        if completed == 0: print ""
        print '\r {2} [{0}] {1}%'.format('#'*(int(completed/5)), completed, self.name),
        # Do some stuff
        sleep(0.001)

    def run(self):
        for i in xrange(self.total_iters):
            self.step(i)
        # Output the final result in unique file
And this is my try:
import multiprocessing as mp

if __name__ == "__main__":
    algo1 = Algo(200, 0, 0, "test1")
    pool = mp.Pool(processes=3)
    for i in xrange(3):
        pool.apply_async(algo1.run)  # in real life run will be passed N arguments
    pool.close()
    pool.join()
Any ideas?
PS: I am trying to avoid curses

Quick and dirty, and in Python 3, but you will get the idea ;)
import random
import time
import multiprocessing
import os
import collections

class Algo(multiprocessing.Process):
    def __init__(self, steps, name, status_queue):
        multiprocessing.Process.__init__(self)
        self.steps = steps
        self.name = name
        self.status_queue = status_queue

    def step(self, step):
        # Progress bar
        self.status_queue.put((self.name, (step+1.0)/self.steps))
        # Do some stuff
        time.sleep(0.1)

    def run(self):
        for i in range(self.steps):
            self.step(i)

def print_progress(progress):
    # Windows:
    os.system('cls')
    for name, percent in progress.items():
        percent = int(percent * 100)
        bar = ('#' * int(percent/10)) + (' ' * (10 - int(percent/10)))
        print("{}: [{}] {}%".format(name, bar, percent))

if __name__ == "__main__":
    status = multiprocessing.Queue()
    progress = collections.OrderedDict()
    algos = [Algo(random.randrange(100, 200), "test" + str(i), status) for i in range(3)]
    for a in algos:
        a.start()
    while any([a.is_alive() for a in algos]):
        while not status.empty():
            name, percent = status.get()
            progress[name] = percent
        print_progress(progress)
        time.sleep(0.1)
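For reference, the same one-bar-per-process effect can be had with the third-party tqdm library. This is only a sketch, assuming tqdm is installed (pip install tqdm) and that the terminal handles cursor repositioning; run(), the step count and the names are illustrative rather than taken from the question.
# Sketch using tqdm; each worker owns one bar, and `position` keeps the bars on separate lines.
import multiprocessing
import time

from tqdm import tqdm

def run(steps, name, position):
    for _ in tqdm(range(steps), desc=name, position=position):
        time.sleep(0.01)  # stand-in for one iteration of the real algorithm

if __name__ == "__main__":
    workers = [multiprocessing.Process(target=run, args=(200, "test" + str(i), i))
               for i in range(3)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()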

Related

How to use redlock using python

I have been working for a few days to understand Redlock, and I have seen that acquiring a lock takes around 1 second, which seems a bit too much just to take a lock in my opinion, but I could be wrong.
I have created a small script:
redis_test.py
import serialized_redis
from pottery import Redlock

redis_connection = serialized_redis.MsgpackSerializedRedis(host='localhost', port=6379, db=0)

def lock(argument):
    return Redlock(key=argument, auto_release_time=120 * 1000)
main.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import math
import random
import sys
import time
from threading import Thread

from loguru import logger

from lib.redis_test import lock, redis_connection

class StopWatch:
    def __init__(self):
        self.start()

    def start(self):
        self._startTime = time.time()

    def getStartTime(self):
        return self._startTime

    def elapsed(self, prec=3):
        prec = 3 if prec is None or not isinstance(prec, int) else prec
        diff = time.time() - self._startTime
        return self.round(diff, prec)

    def round(self, n, p=0):
        m = 10 ** p
        return math.floor(n * m + 0.5) / m

def algorithm_with_lock(random_number):
    print("Keys inside Redis", redis_connection.keys())
    ourWatch = StopWatch()
    ourWatch.start()
    if not redis_connection.exists(f'redlock:{random_number}'):
        # print("Time taken before redis_connection.exists", ourWatch.elapsed()) -> 0.0 seconds
        with lock(f'{random_number}'):
            print("Time taken before redis_connection.exists", ourWatch.elapsed())  # 1.002 seconds
            time.sleep(5)
            redis_connection.set("Hello_world", random.randint(1, 5))
            return True
    else:
        return False

def main():
    while True:
        chosen_number = f"number_{random.randint(1, 3)}"
        response = algorithm_with_lock(chosen_number)
        if response:
            logger.info(f"Yay, finished my job! -> {chosen_number}")
            sys.exit()
        else:
            logger.debug(f"Trying new number! -> {chosen_number}")
            time.sleep(1)

for i in range(1):
    Thread(
        target=main,
    ).start()
    time.sleep(.1)
The issue with that is that it takes too long to actually lock a Redis key, which means multiple threads can try to lock the same key and end up stuck waiting on the lock. My guess is that it should not take 1 second to actually lock, but I could be wrong, and here I am: I wonder what the reason for the long locking time could be, and whether there is a chance I'm using it incorrectly.

how to get the output of the function used in Timer

I am running a Flask application which shall call a function getSomething(input) every day at the same time. This function returns a string. I cannot just print() the result, because I need to pass the string on to another function.
When using the Timer function
t = Timer(secs, getSomething, args=[input])
I do not know how to access the return value of the getSomething function.
example:
from datetime import datetime, timedelta
from threading import Timer

# This shall trigger something at a certain time
x = datetime.today()
y = x + timedelta(seconds=5)
delta_t = y - x
secs = delta_t.seconds + 1

def getSomething(a):
    b = a + " Hello World"
    return b

s = "Test"
t = Timer(secs, getSomething, args=[s])
t.start()
I know the very same question has been asked here before, but I am not able to adapt it to my problem, though I think I cannot avoid the return.
The problem can be solved the following way:
from datetime import datetime, timedelta
from threading import Timer

d = []  # use list, dictionary, some class or queue.Queue to store data

def getSomething(a):
    global d
    d.append(a + " Hello World")

if __name__ == "__main__":
    # This shall trigger something at a certain time
    x = datetime.today()
    y = x + timedelta(seconds=5)
    delta_t = y - x
    secs = delta_t.seconds + 1

    s = "Test"
    t = Timer(secs, getSomething, args=[s])
    t.start()
    t.join()  # need to wait until thread is finished to get new d value
    print(d[0])
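Since the comment above mentions queue.Queue, here is a sketch of the same idea using a queue instead of a global list; getSomething and the "Test" argument are carried over from the example, the rest is illustrative.
# Variant of the example above using queue.Queue instead of a global list.
from queue import Queue
from threading import Timer

results = Queue()

def getSomething(a):
    results.put(a + " Hello World")

if __name__ == "__main__":
    t = Timer(5, getSomething, args=["Test"])
    t.start()
    print(results.get())  # blocks until the timer has fired and pushed a result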
Another example of how you can get data from another thread:
from threading import Thread
import time

class Storage:
    def __init__(self):
        self.storage = ""

class MyThread(Thread):
    def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, *, daemon=None):
        super().__init__(group=group, target=target, name=name, daemon=daemon)
        self.args = args
        self.kwargs = kwargs

    def run(self):
        self.args[0].storage += "Hello!"

if __name__ == "__main__":
    s = Storage()
    branch = MyThread(args=(s,),)
    branch.start()
    time.sleep(1)  # need some time to wait for another thread
    print(s.storage)

Python threading in multi subclasses

I have a series of classes that inherit from one another in both a serial and a parallel manner, and I need to use Python threading for all classes when possible. An example is below. The problem is that the Build class never gets its run function executed, even though run is the method the Thread class calls. Threading works fine in the MyThread class, though. Any idea how to make the Build class start as a thread?
from threading import Thread
from random import randint
import time

class Build(Thread):
    def __init__(self):
        Thread.__init__(self)

    def run(self):
        # This run function currently not being executed
        for i in range(20):
            print('Second series %i in thread' % (i))
            time.sleep(1)

class MyThread(Build, Thread):
    def __init__(self, val):
        ''' Constructor. '''
        Thread.__init__(self)
        Build.__init__(self)
        self.val = val

    def run(self):
        for i in range(1, self.val):
            print('Value %d in thread %s' % (i, self.getName()))
            # Sleep for random time between 1 ~ 5 seconds
            secondsToSleep = randint(1, 5)
            print('%s sleeping for %d seconds...' % (self.getName(), secondsToSleep))
            time.sleep(secondsToSleep)

# Run following code when the program starts
if __name__ == '__main__':
    # Declare objects of MyThread class
    myThreadOb1 = MyThread(4)
    myThreadOb1.setName('Thread 1')
    myThreadOb2 = MyThread(4)
    myThreadOb2.setName('Thread 2')
    # Start running the threads!
    myThreadOb1.start()
    myThreadOb2.start()
    # Wait for the threads to finish...
    myThreadOb1.join()
    myThreadOb2.join()
    print('Main Terminating...')
FYI: Instead of subclassing threading.Thread, the better way to achieve what you want is to make your class instances callable and just pass them to the target keyword argument of the Thread class constructor. The advantage of doing this is that you can pass additional arguments to each Thread instance.
Going with your sample code:
class MyThread(Build):
    def __init__(self):
        ''' Constructor. '''
        Build.__init__(self)

    # this allows your class to be a callable.
    def __call__(self, val):
        for i in range(1, val):
            print('Value %d in thread %s' % (i, self.getName()))
            # Sleep for random time between 1 ~ 5 seconds
            secondsToSleep = randint(1, 5)
            print('%s sleeping for %d seconds...' % (self.getName(), secondsToSleep))
            time.sleep(secondsToSleep)

# Run following code when the program starts
if __name__ == '__main__':
    # Declare an object of the MyThread class and pass it as the thread target
    myThreadObj1 = MyThread()
    myThread1 = Thread(target=myThreadObj1, args=(4,))
    myThread1.start()

Printing an update line whenever a subprocess finishes in Python 3's multiprocessing Pool

I'm using Python's multiprocessing library to process a list of inputs with the built-in map() method. Here's the relevant code segment:
subp_pool = Pool(self.subprocesses)
cases = subp_pool.map(self.get_case, input_list)
return cases
The function to be run in parallel is self.get_case(), and the list of inputs is input_list.
I wish to print a progress prompt to the standard output in the following format:
Working (25/100 cases processed)
How can I update a local variable inside the class that contains the Pool, so that whenever a subprocess finishes, the variable is incremented by 1 (and then printed to the standard output)?
There's no way to do this using multiprocessing.map, because it doesn't alert the main process about anything until it's completed all its tasks. However, you can get similar behavior by using apply_async in tandem with the callback keyword argument:
from multiprocessing.dummy import Pool
from functools import partial
import time

class Test(object):
    def __init__(self):
        self.count = 0
        self.threads = 4

    def get_case(self, x):
        time.sleep(x)

    def callback(self, total, x):
        self.count += 1
        print("Working ({}/{}) cases processed.".format(self.count, total))

    def do_async(self):
        thread_pool = Pool(self.threads)
        input_list = range(5)
        callback = partial(self.callback, len(input_list))
        tasks = [thread_pool.apply_async(self.get_case, (x,),
                                         callback=callback) for x in input_list]
        return [task.get() for task in tasks]

if __name__ == "__main__":
    t = Test()
    t.do_async()
Call the print_data() from the get_case() method and you are done.
from threading import Lock

class A(object):
    def __init__(self):
        self.mutex = Lock()
        self.count = 0

    def print_data(self):
        self.mutex.acquire()
        try:
            self.count += 1
            print('Working (' + str(self.count) + ' cases processed)')
        finally:
            self.mutex.release()
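For illustration, a minimal sketch of how the counter could be wired into get_case(), assuming the pool is a thread pool (multiprocessing.dummy) so the shared counter in A is visible to every worker; the Job class, the get_case body and input_list are made up for the example, not taken from the question.
# Minimal sketch: thread-pool workers bump the lock-protected counter from class A above.
from multiprocessing.dummy import Pool
import time

class Job(A):  # reuse the lock-protected counter defined above
    def get_case(self, x):
        time.sleep(x)      # simulate work
        self.print_data()  # report progress as soon as this case is done

if __name__ == "__main__":
    job = Job()
    input_list = [0.1, 0.2, 0.3, 0.4]
    with Pool(4) as pool:
        pool.map(job.get_case, input_list)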

Python Equivalent of setInterval()?

Does Python have a function similar to JavaScript's setInterval()?
I would like to have:
def set_interval(func, interval):
...
That will call func every interval time units.
This might be the correct snippet you were looking for:
import threading

def set_interval(func, sec):
    def func_wrapper():
        set_interval(func, sec)
        func()
    t = threading.Timer(sec, func_wrapper)
    t.start()
    return t
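A brief usage sketch (the tick callback is illustrative). Note one limitation of this recursive approach: the Timer returned to the caller only governs the first firing, so it can only be cancelled before the chain has started; every later firing creates a fresh Timer inside func_wrapper that the caller never sees.
import time

def tick():
    print("tick")

t = set_interval(tick, 1)
time.sleep(0.5)
t.cancel()  # cancels the pending first firing; once ticking has started, this handle no longer stops the chain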
This is a version where you can start and stop.
It is not blocking.
There is also no drift, since the execution-time error is not accumulated (important for long runs with a very short interval, as with audio, for example).
import time, threading

StartTime = time.time()

def action():
    print('action ! -> time : {:.1f}s'.format(time.time()-StartTime))

class setInterval:
    def __init__(self, interval, action):
        self.interval = interval
        self.action = action
        self.stopEvent = threading.Event()
        thread = threading.Thread(target=self.__setInterval)
        thread.start()

    def __setInterval(self):
        nextTime = time.time() + self.interval
        while not self.stopEvent.wait(nextTime - time.time()):
            nextTime += self.interval
            self.action()

    def cancel(self):
        self.stopEvent.set()

# start action every 0.6s
inter = setInterval(0.6, action)
print('just after setInterval -> time : {:.1f}s'.format(time.time()-StartTime))

# will stop interval in 5s
t = threading.Timer(5, inter.cancel)
t.start()
Output is :
just after setInterval -> time : 0.0s
action ! -> time : 0.6s
action ! -> time : 1.2s
action ! -> time : 1.8s
action ! -> time : 2.4s
action ! -> time : 3.0s
action ! -> time : 3.6s
action ! -> time : 4.2s
action ! -> time : 4.8s
Just keep it nice and simple.
import threading

def setInterval(func, time):
    e = threading.Event()
    while not e.wait(time):
        func()

def foo():
    print "hello"

# using
setInterval(foo, 5)

# output:
hello
hello
.
.
.
EDIT : This code is non-blocking
import threading

class ThreadJob(threading.Thread):
    def __init__(self, callback, event, interval):
        '''runs the callback function after interval seconds

        :param callback: callback function to invoke
        :param event: external event for controlling the update operation
        :param interval: time in seconds after which are required to fire the callback
        :type callback: function
        :type interval: int
        '''
        self.callback = callback
        self.event = event
        self.interval = interval
        super(ThreadJob, self).__init__()

    def run(self):
        while not self.event.wait(self.interval):
            self.callback()

event = threading.Event()

def foo():
    print "hello"

k = ThreadJob(foo, event, 2)
k.start()
print "It is non-blocking"
Change Nailxx's answer a bit and you got the answer!
from threading import Timer

def hello():
    print "hello, world"
    Timer(30.0, hello).start()

Timer(30.0, hello).start()  # after 30 seconds, "hello, world" will be printed
The sched module provides these abilities for general Python code. However, as its documentation suggests, if your code is multithreaded it might make more sense to use the threading.Timer class instead.
I think this is what you're after:
# timertest.py
import sched, time

def dostuff():
    print "stuff is being done!"
    s.enter(3, 1, dostuff, ())

s = sched.scheduler(time.time, time.sleep)
s.enter(3, 1, dostuff, ())
s.run()
If you add another entry to the scheduler at the end of the repeating method, it'll just keep going.
I used sched to create a setInterval function (gist):
import functools
import sched, time

s = sched.scheduler(time.time, time.sleep)

def setInterval(sec):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*argv, **kw):
            setInterval(sec)(func)
            func(*argv, **kw)
        s.enter(sec, 1, wrapper, ())
        return wrapper
    s.run()
    return decorator

@setInterval(sec=3)
def testInterval():
    print("test Interval ")

testInterval()
Simple setInterval utils
from threading import Timer

def setInterval(timer, task):
    isStop = task()
    if not isStop:
        Timer(timer, setInterval, [timer, task]).start()

def hello():
    print "do something"
    return False  # return True if you want to stop

if __name__ == "__main__":
    setInterval(2.0, hello)  # every 2 seconds, "do something" will be printed
The above method didn't quite do it for me as I needed to be able to cancel the interval. I turned the function into a class and came up with the following:
import threading

class setInterval():
    def __init__(self, func, sec):
        def func_wrapper():
            self.t = threading.Timer(sec, func_wrapper)
            self.t.start()
            func()
        self.t = threading.Timer(sec, func_wrapper)
        self.t.start()

    def cancel(self):
        self.t.cancel()
Most of the answers above do not shut down the thread properly. While using a Jupyter notebook I noticed that when an explicit interrupt was sent, the threads were still running and, worse, they kept multiplying: starting at 1 running thread, then 2, 4, etc. My method below is based on the answer by @doom, but cleanly handles interrupts by running an infinite loop in the main thread to listen for SIGINT and SIGTERM events.
No drift
Cancelable
Handles SIGINT and SIGTERM very well
Doesn't make a new thread for every run
Feel free to suggest improvements
import time
import threading
import signal

# Record the time for the purposes of demonstration
start_time = time.time()

class ProgramKilled(Exception):
    """
    An instance of this custom exception class will be thrown every time we get a SIGTERM or SIGINT
    """
    pass

# Raise the custom exception whenever SIGINT or SIGTERM is triggered
def signal_handler(signum, frame):
    raise ProgramKilled

# This function serves as the callback triggered on every run of our IntervalThread
def action():
    print('action ! -> time : {:.1f}s'.format(time.time()-start_time))

# https://stackoverflow.com/questions/2697039/python-equivalent-of-setinterval
class IntervalThread(threading.Thread):
    def __init__(self, interval, action, *args, **kwargs):
        super(IntervalThread, self).__init__()
        self.interval = interval
        self.action = action
        self.stopEvent = threading.Event()
        self.start()

    def run(self):
        nextTime = time.time() + self.interval
        while not self.stopEvent.wait(nextTime - time.time()):
            nextTime += self.interval
            self.action()

    def cancel(self):
        self.stopEvent.set()

def main():
    # Handle SIGINT and SIGTERM with the help of the callback function
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)

    # start action every 1s
    inter = IntervalThread(1, action)
    print('just after setInterval -> time : {:.1f}s'.format(time.time()-start_time))

    # will stop interval in 500s
    t = threading.Timer(500, inter.cancel)
    t.start()

    # https://www.g-loaded.eu/2016/11/24/how-to-terminate-running-python-threads-using-signals/
    while True:
        try:
            time.sleep(1)
        except ProgramKilled:
            print("Program killed: running cleanup code")
            inter.cancel()
            break

if __name__ == "__main__":
    main()
With the above solutions, if the program is shut down there is no guarantee that it will shut down gracefully; it is always recommended to stop a program via a soft kill, and most of them also lack a function to stop. I found a nice article on Medium written by Sankalp which solves both of these issues (run periodic tasks in python); refer to the attached link for a deeper insight.
In the sample below, the signal library is used to detect whether the kill is a soft kill or a hard kill.
import threading, time, signal
from datetime import timedelta

WAIT_TIME_SECONDS = 1

class ProgramKilled(Exception):
    pass

def foo():
    print time.ctime()

def signal_handler(signum, frame):
    raise ProgramKilled

class Job(threading.Thread):
    def __init__(self, interval, execute, *args, **kwargs):
        threading.Thread.__init__(self)
        self.daemon = False
        self.stopped = threading.Event()
        self.interval = interval
        self.execute = execute
        self.args = args
        self.kwargs = kwargs

    def stop(self):
        self.stopped.set()
        self.join()

    def run(self):
        while not self.stopped.wait(self.interval.total_seconds()):
            self.execute(*self.args, **self.kwargs)

if __name__ == "__main__":
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)
    job = Job(interval=timedelta(seconds=WAIT_TIME_SECONDS), execute=foo)
    job.start()

    while True:
        try:
            time.sleep(1)
        except ProgramKilled:
            print "Program killed: running cleanup code"
            job.stop()
            break

# output
# Tue Oct 16 17:47:51 2018
# Tue Oct 16 17:47:52 2018
# Tue Oct 16 17:47:53 2018
# ^CProgram killed: running cleanup code
setInterval should run on a separate thread and not freeze the task while its loop is running.
Here is my RUNTIME package that supports the multithread feature:
setTimeout(F, ms): schedules the function to fire in an independent thread.
delayF(F, ms): similar to setTimeout(F, ms).
setInterval(F, ms): asynchronous loop.
.pause, .resume: pause and resume the interval.
clearInterval(interval): clear the interval.
It's short and simple. Note that Python needs a lambda if you pass the function inline, but a lambda does not support a statement block, so you should define the function body before putting it into the setInterval.
### DEMO PYTHON MULTITHREAD ASYNCHRONOUS LOOP ###
import time;
import threading;
import random;

def delay(ms): time.sleep(ms/1000);  # Control the loop speed

def setTimeout(R, delayMS):
    t = threading.Timer(delayMS/1000, R)
    t.start();
    return t;

def delayF(R, delayMS):
    t = threading.Timer(delayMS/1000, R)
    t.start();
    return t;

class THREAD:
    def __init__(this):
        this.R_onRun = None;
        this.thread = None;

    def run(this):
        this.thread = threading.Thread(target=this.R_onRun);
        this.thread.start();

    def isRun(this): return this.thread.isAlive();

class setInterval:
    def __init__(this, R_onRun, msInterval):
        this.ms = msInterval;
        this.R_onRun = R_onRun;
        this.kStop = False;
        this.thread = THREAD();
        this.thread.R_onRun = this.Clock;
        this.thread.run();

    def Clock(this):
        while not this.kStop:
            this.R_onRun();
            delay(this.ms);

    def pause(this):
        this.kStop = True;

    def stop(this):
        this.kStop = True;

    def resume(this):
        if (this.kStop):
            this.kStop = False;
            this.thread.run();

def clearInterval(Timer): Timer.stop();

# EXAMPLE
def p(): print(random.random());

tm = setInterval(p, 20);
tm2 = setInterval(lambda: print("AAAAA"), 20);
delayF(tm.pause, 1000);
delayF(tm.resume, 2000);
delayF(lambda: clearInterval(tm), 3000);
Save it to a .py file and run it. You will see it print both random numbers and the string "AAAAA". The number-printing thread will pause after 1 second, resume printing for another second, and then stop, while the string thread keeps printing without corruption.
If you use OpenCV for graphic animation with these setInterval calls to boost the animation speed, you must keep one main thread to call waitKey, otherwise the window will freeze no matter how slow the delay is or whether you called waitKey in a sub-thread:
def p(): ...  # Your drawing task

setInterval(p, 1);  # Subthread1 running draw
setInterval(p, 1);  # Subthread2 running draw
setInterval(p, 1);  # Subthread3 running draw

while True: cv2.waitKey(10);  # Main thread where waitKey has effect
You can also try out this method:
import time

while True:
    time.sleep(5)
    print("5 seconds has passed")
So it will print "5 seconds has passed" every 5 seconds.
The function sleep() suspends execution for the given number of seconds. The argument may be a floating point number to indicate a more precise sleep time.
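For example, a fractional argument gives a sub-second interval:
import time

for _ in range(4):
    time.sleep(0.5)  # half a second; sleep() accepts floats
    print("half a second has passed")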
Recently, I had the same issue as you, and I found these solutions:
1. you can use the class threading.Timer (introduced above)
2. you can use the library sched (also introduced above)
3. you can use the library Advanced Python Scheduler (recommended; a sketch follows below)
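A minimal sketch of option 3, assuming the third-party APScheduler package is installed (pip install apscheduler); the tick() callback and the 3-second period are illustrative.
from apscheduler.schedulers.background import BackgroundScheduler
import time

def tick():
    print("tick", time.strftime("%X"))

scheduler = BackgroundScheduler()
scheduler.add_job(tick, 'interval', seconds=3)  # run tick() every 3 seconds
scheduler.start()

try:
    time.sleep(10)           # keep the main thread alive while jobs run
finally:
    scheduler.shutdown()     # stop the scheduler's worker threads cleanly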
Some answers above that use func_wrapper and threading.Timer do indeed work, except that they spawn a new thread every time an interval fires, which causes memory problems.
The basic example below roughly implements a similar mechanism by putting the interval on a separate thread. It sleeps at the given interval. Before jumping into the code, here are some of the limitations that you need to be aware of:
JavaScript is single threaded, so when the function inside setInterval is fired, nothing else will be working at the same time (excluding worker threads, but let's stick to the general use case of setInterval). Therefore, threading is safe there. But here in this implementation, you may encounter race conditions unless you use a threading.RLock.
The implementation below uses time.sleep to simulate intervals, but adding the execution time of func, the total time for one interval may be greater than what you expect. So depending on the use case, you may want to "sleep less" (minus the time taken for calling func).
I only roughly tested this, and you should definitely not use global variables the way I did; feel free to tweak it so that it fits into your system.
Enough talking, here is the code:
# Python 2.7
import threading
import time

class Interval(object):
    def __init__(self):
        self.daemon_alive = True
        self.thread = None  # keep a reference to the thread so that we can "join"

    def ticktock(self, interval, func):
        while self.daemon_alive:
            time.sleep(interval)
            func()

num = 0

def print_num():
    global num
    num += 1
    print 'num + 1 = ', num

def print_negative_num():
    global num
    print '-num = ', num * -1

intervals = {}  # keep track of intervals
g_id_counter = 0  # roughly generate ids for intervals

def set_interval(interval, func):
    global g_id_counter

    interval_obj = Interval()
    # Put this interval on a new thread
    t = threading.Thread(target=interval_obj.ticktock, args=(interval, func))
    t.setDaemon(True)
    interval_obj.thread = t
    t.start()

    # Register this interval so that we can clear it later
    # using roughly generated id
    interval_id = g_id_counter
    g_id_counter += 1
    intervals[interval_id] = interval_obj

    # return interval id like it does in JavaScript
    return interval_id

def clear_interval(interval_id):
    # terminate this interval's while loop
    intervals[interval_id].daemon_alive = False
    # kill the thread
    intervals[interval_id].thread.join()
    # pop out the interval from registry for reusing
    intervals.pop(interval_id)

if __name__ == '__main__':
    num_interval = set_interval(1, print_num)
    neg_interval = set_interval(3, print_negative_num)

    time.sleep(10)  # Sleep 10 seconds on main thread to let interval run
    clear_interval(num_interval)
    clear_interval(neg_interval)

    print "- Are intervals all cleared?"
    time.sleep(3)  # check if both intervals are stopped (not printing)
    print "- Yup, time to get beers"
Expected output:
num + 1 = 1
num + 1 = 2
-num = -2
num + 1 = 3
num + 1 = 4
num + 1 = 5
-num = -5
num + 1 = 6
num + 1 = 7
num + 1 = 8
-num = -8
num + 1 = 9
num + 1 = 10
-num = -10
Are intervals all cleared?
Yup, time to get beers
My Python 3 module jsinterval.py will be helpful! Here it is:
"""
Threaded intervals and timeouts from JavaScript
"""
import threading, sys
__all__ = ['TIMEOUTS', 'INTERVALS', 'setInterval', 'clearInterval', 'setTimeout', 'clearTimeout']
TIMEOUTS = {}
INTERVALS = {}
last_timeout_id = 0
last_interval_id = 0
class Timeout:
"""Class for all timeouts."""
def __init__(self, func, timeout):
global last_timeout_id
last_timeout_id += 1
self.timeout_id = last_timeout_id
TIMEOUTS[str(self.timeout_id)] = self
self.func = func
self.timeout = timeout
self.threadname = 'Timeout #%s' %self.timeout_id
def run(self):
func = self.func
delx = self.__del__
def func_wrapper():
func()
delx()
self.t = threading.Timer(self.timeout/1000, func_wrapper)
self.t.name = self.threadname
self.t.start()
def __repr__(self):
return '<JS Timeout set for %s seconds, launching function %s on timeout reached>' %(self.timeout, repr(self.func))
def __del__(self):
self.t.cancel()
class Interval:
"""Class for all intervals."""
def __init__(self, func, interval):
global last_interval_id
self.interval_id = last_interval_id
INTERVALS[str(self.interval_id)] = self
last_interval_id += 1
self.func = func
self.interval = interval
self.threadname = 'Interval #%s' %self.interval_id
def run(self):
func = self.func
interval = self.interval
def func_wrapper():
timeout = Timeout(func_wrapper, interval)
self.timeout = timeout
timeout.run()
func()
self.t = threading.Timer(self.interval/1000, func_wrapper)
self.t.name = self.threadname
self.t.run()
def __repr__(self):
return '<JS Interval, repeating function %s with interval %s>' %(repr(self.func), self.interval)
def __del__(self):
self.timeout.__del__()
def setInterval(func, interval):
"""
Create a JS Interval: func is the function to repeat, interval is the interval (in ms)
of executing the function.
"""
temp = Interval(func, interval)
temp.run()
idx = int(temp.interval_id)
del temp
return idx
def clearInterval(interval_id):
try:
INTERVALS[str(interval_id)].__del__()
del INTERVALS[str(interval_id)]
except KeyError:
sys.stderr.write('No such interval "Interval #%s"\n' %interval_id)
def setTimeout(func, timeout):
"""
Create a JS Timeout: func is the function to timeout, timeout is the timeout (in ms)
of executing the function.
"""
temp = Timeout(func, timeout)
temp.run()
idx = int(temp.timeout_id)
del temp
return idx
def clearTimeout(timeout_id):
try:
TIMEOUTS[str(timeout_id)].__del__()
del TIMEOUTS[str(timeout_id)]
except KeyError:
sys.stderr.write('No such timeout "Timeout #%s"\n' %timeout_id)
CODE EDIT:
Fixed the memory leak (spotted by @benjaminz). Now ALL threads are cleaned up upon end. Why does this leak happen? It happens because of the implicit (or even explicit) references. In my case, TIMEOUTS and INTERVALS. Timeouts self-clean automatically (after this patch) because they use a function wrapper which calls the function and then self-kills. But how does this happen? Objects can't be deleted from memory unless all references are deleted too, or the gc module is used. Explanation: there is no way (in my code) to create unwanted references to timeouts/intervals. They have only ONE referrer: the TIMEOUTS/INTERVALS dicts. And, when interrupted or finished (only timeouts can finish uninterrupted), they delete the only existing reference to themselves: their corresponding dict element. Classes are perfectly encapsulated using __all__, so no space for memory leaks.
Here is a low time drift solution that uses a thread to periodically signal an Event object. The thread's run() does almost nothing while waiting for a timeout; hence the low time drift.
# Example of low drift (time) periodic execution of a function.
import threading
import time

# Thread that sets 'flag' after 'timeout'
class timerThread(threading.Thread):
    def __init__(self, timeout, flag):
        threading.Thread.__init__(self)
        self.timeout = timeout
        self.stopFlag = False
        self.event = threading.Event()
        self.flag = flag

    # Low drift run(); there are only the 'if'
    # and 'set' methods between waits.
    def run(self):
        while not self.event.wait(self.timeout):
            if self.stopFlag:
                break
            self.flag.set()

    def stop(self):
        self.stopFlag = True
        self.event.set()

# Data.
printCnt = 0

# Flag to print.
printFlag = threading.Event()

# Create and start the timer thread.
printThread = timerThread(3, printFlag)
printThread.start()

# Loop to wait for flag and print time.
while True:
    # Wait for flag.
    printFlag.wait()
    # Flag must be manually cleared.
    printFlag.clear()
    print(time.time())
    printCnt += 1
    if printCnt == 3:
        break

# Stop the thread and exit.
printThread.stop()
printThread.join()
print('Done')
Fall asleep until the next interval of seconds length starts (not concurrent):
import time

def sleep_until_next_interval(seconds):
    now = time.time()
    fall_asleep = seconds - now % seconds
    time.sleep(fall_asleep)

while True:
    sleep_until_next_interval(10)  # 10 seconds - worktime
    # work here
simple and no drift.
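For example, with seconds = 10: if time.time() % 10 is 3.2 when the function is called, fall_asleep is 6.8, so the loop wakes exactly on the next multiple of 10 seconds, and the interval boundaries stay aligned as long as the work itself takes less than one interval.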
I have written my code to make a very very flexible setInterval in python. Here you are:
import threading

class AlreadyRunning(Exception):
    pass

class IntervalNotValid(Exception):
    pass

class setInterval():
    def __init__(this, func=None, sec=None, args=[]):
        this.running = False
        this.func = func             # the function to be run
        this.sec = sec               # interval in seconds
        this.Return = None           # the returned data
        this.args = args
        this.runOnce = None          # associated with run_once() method
        this.runOnceArgs = None      # associated with run_once() method

        if (func is not None and sec is not None):
            this.running = True

            if (not callable(func)):
                raise TypeError("non-callable object is given")

            if (not isinstance(sec, int) and not isinstance(sec, float)):
                raise TypeError("A non-numeric object is given")

            this.TIMER = threading.Timer(this.sec, this.loop)
            this.TIMER.start()

    def start(this):
        if (not this.running):
            if (not this.isValid()):
                raise IntervalNotValid("The function and/or the " +
                                       "interval hasn't been provided or is invalid.")
            this.running = True
            this.TIMER = threading.Timer(this.sec, this.loop)
            this.TIMER.start()
        else:
            raise AlreadyRunning("Tried to run an already running interval")

    def stop(this):
        this.running = False

    def isValid(this):
        if (not callable(this.func)):
            return False
        cond1 = not isinstance(this.sec, int)
        cond2 = not isinstance(this.sec, float)
        if (cond1 and cond2):
            return False
        return True

    def loop(this):
        if (this.running):
            this.TIMER = threading.Timer(this.sec, this.loop)
            this.TIMER.start()
            function_, Args_ = this.func, this.args

            if (this.runOnce is not None):  # someone has provided the run_once
                runOnce, this.runOnce = this.runOnce, None
                result = runOnce(*(this.runOnceArgs))
                this.runOnceArgs = None

                # if and only if the result is False. Do not accept "None"
                # nor zero.
                if (result is False):
                    return  # cancel the interval right now

            this.Return = function_(*Args_)

    def change_interval(this, sec):
        cond1 = not isinstance(sec, int)
        cond2 = not isinstance(sec, float)
        if (cond1 and cond2):
            raise TypeError("A non-numeric object is given")

        # prevent error when providing interval to a blueprint
        if (this.running):
            this.TIMER.cancel()

        this.sec = sec

        # prevent error when providing interval to a blueprint
        # if the function hasn't been provided yet
        if (this.running):
            this.TIMER = threading.Timer(this.sec, this.loop)
            this.TIMER.start()

    def change_next_interval(this, sec):
        if (not isinstance(sec, int) and not isinstance(sec, float)):
            raise TypeError("A non-numeric object is given")
        this.sec = sec

    def change_func(this, func, args=[]):
        if (not callable(func)):
            raise TypeError("non-callable object is given")
        this.func = func
        this.args = args

    def run_once(this, func, args=[]):
        this.runOnce = func
        this.runOnceArgs = args

    def get_return(this):
        return this.Return
You can get many features and flexibility. Running this code won't freeze your code, you can change the interval at run time, you can change the function at run time, you can pass arguments, you can get the returned object from your function, and many more. You can make your tricks too!
Here's a very simple and basic example of how to use it:
import time

def interval(name="world"):
    print(f"Hello {name}!")

# the function named interval will be called every two seconds
# output: "Hello world!"
interval1 = setInterval(interval, 2)

# the function named interval will be called every 1.5 seconds
# output: "Hello Jane!"
interval2 = setInterval(interval, 1.5, ["Jane"])

time.sleep(5)  # stop all intervals after 5 seconds
interval1.stop()
interval2.stop()
Check out my Github project to see more examples and follow next updates :D
https://github.com/Hzzkygcs/setInterval-python
Here's something easy peazy:
import time

delay = 10  # Seconds

def setInterval():
    print('I print in intervals!')
    time.sleep(delay)
    setInterval()  # plain recursion, so this will eventually hit the recursion limit

setInterval()
Things work differently in Python: you need to either sleep() (if you want to block the current thread) or start a new thread. See http://docs.python.org/library/threading.html
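A minimal sketch of the "start a new thread" option; tick(), loop() and the one-second period are illustrative.
import threading
import time

def tick():
    print("tick")

def loop():
    while True:
        tick()
        time.sleep(1)  # blocks only this worker thread

worker = threading.Thread(target=loop, daemon=True)  # daemon thread exits with the program
worker.start()

time.sleep(5)  # the main thread stays free to do other work here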
From Python Documentation:
from threading import Timer

def hello():
    print "hello, world"

t = Timer(30.0, hello)
t.start()  # after 30 seconds, "hello, world" will be printed
