How can I simultaneously run the following code OR run the TTS function after returning the text?
CODE:
def main(q):
    # CODE BEFORE THIS.
    # TTS IS JUST A SIMPLE TEXT TO SPEECH FUNCTION
    # NOTE(review): `response` and `responsetts` are presumably assigned in the
    # elided code above — confirm before running this fragment standalone.
    time.sleep(random.uniform(0.5, 2))
    response = 'BOT: ' + response
    # TTS
    # SIMULTANEOUSLY RUN BELOW
    # PEP 8: compare against None with `is`/`is not`, not equality operators.
    if responsetts is not None:
        tts(responsetts)
    else:
        tts(response)
    return response


if __name__ == '__main__':
    while True:
        query = input('U: ')
        print(main(query))
The simple solution, in case you want your tts function to run after the response is printed, is to let main print the response before calling tts. For more flexibility and a more responsive prompt, you can run your tts call on a separate thread.
The threading module offers a Timer, which is a subclass of Thread. Timer has an interval parameter for adding a sleep before the target function gets executed. You could use this to add a delay if you want, or just use Thread if you don't need this feature. I use espeak in my example instead of tts:
import time
import random
import subprocess
from threading import Timer
from functools import partial
def _espeak(msg):
# Speak slowly in a female english voice
cmd = ["espeak", '-s130', '-ven+f5', msg]
subprocess.run(cmd)
def _vocalize(response, responsetts=None, interval=0):
# "Comparisons to singletons like None should always be done with is or
# is not, never the equality operators." -PEP 8
if responsetts is not None:
response = responsetts
Timer(interval=interval, function=_espeak, args=(response,)).start()
def _get_response(q):
time.sleep(random.uniform(0.5, 2))
response = '42'
response = 'BOT: '+ response
return response
def _handle_query(q):
response = _get_response(q)
print(response)
_vocalize(response, interval=0)
def main():
prompt = partial(input, 'U: ')
# alternative to using partial: iter(lambda: input('U: '), 'q')
for query in iter(prompt, 'q'): # quits on input 'q'
_handle_query(query)
if __name__ == '__main__':
main()
Related
I have two scripts, new.py and test.py.
Test.py
import time

# NOTE(review): this loop used to run at module level. Any importer (the
# question's new.py does `import test`) would then hang forever inside this
# loop and never see `x` change — module-level code runs once per process.
if __name__ == "__main__":
    while True:
        x = "hello"
        time.sleep(1)
        x = "world"
        time.sleep(1)
new.py
import time

if __name__ == "__main__":
    while True:
        # NOTE(review): `import test` executes test.py's module-level code only
        # on the FIRST import; later iterations reuse the cached module, so
        # `test.x` never changes (and if test.py loops at import time, this
        # first import never returns).
        import test
        x = test.x
        print(x)
        time.sleep(1)
Now from my understanding this should print "hello" and a second later "world" all the time when executing new.py.
It does not print anything. How can I fix that?
Thanks
I think the code below captures what you are asking. Here I simulate two scripts running independently (by using threads), then show how you can use shelve to communicate between them. Note, there are likely much better ways to get to what you are after -- but if you absolutely must run the scripts independently, this will work for you.
Incidentally, any persistent source would do (such as a database).
import shelve
import time
import threading


def script1():
    """Poll the shared shelf once a second until another writer sets 'flag'."""
    while True:
        with shelve.open('my_store') as holder3:
            if holder3['flag'] is not None:
                break
        print('waiting')
        time.sleep(1)
    print("Done")


def script2():
    """Set the shared flag, releasing script1 from its wait loop."""
    print("writing")
    with shelve.open('my_store') as holder2:
        holder2['flag'] = 1


if __name__ == "__main__":
    # Initialize the shared store, then simulate two independent scripts
    # (here as threads) communicating only through the shelf file.
    with shelve.open('my_store') as holder1:
        holder1['flag'] = None
    t = threading.Thread(target=script1)
    t.start()
    time.sleep(5)
    script2()
    t.join()
Yields:
waiting
waiting
waiting
waiting
waiting
writing
Done
Test.py
import time


def hello():
    """Print 'hello' then 'world', pausing one second after each."""
    callList = ['hello', 'world']
    for item in callList:
        # print() call syntax works on both Python 2 and 3; the original
        # `print item` statement is a SyntaxError on Python 3.
        print(item)
        time.sleep(1)


# Guarded so importers (new.py does `from parent import hello`) don't trigger
# an extra run of hello() at import time.
if __name__ == "__main__":
    hello()
new.py
# NOTE(review): `parent` is the module defining hello() — the snippet above
# names that file test.py; confirm the actual module name before running.
from parent import hello

if __name__ == "__main__":
    while True:
        hello()
So I have been playing around with Captcha, Multiprocessing and Flask.
What have I done so far:
Basically what I have done is create my own multiprocessing setup where I enter how many tasks/processes I want to run during this script. If I enter 3, it gives me 3 worker processes, which works fine.
Also whenever webbrowser.open('http://Thrillofit.baller.com:5000/solve') opens, It works aswell. Also get a captcha that is able to solve.
Basically meaning the captcha does work and also the multiprocessing:
What's the issue then?
The issue is that whenever I run the program it gets stuck after solving: I can solve as many captchas as I want, but the program never continues, and I can't work out why. A GIF of what is happening: https://i.gyazo.com/d5f183471f20be5eda6be939d255a157.mp4
In the video you can see that I am trying to solve the captcha, but nothing happens in the program — it just gets stuck.
My thought is that it might be an issue between multiprocessing and the captcha/Flask part, but I can't see the problem and I have been staring at it too long. Maybe someone else can spot it?
The goal is that whenever I solve a captcha, it should print out a token (inside the def passwd(): method), but nothing prints out and it just gets stuck.
import json
import logging
import multiprocessing
import os
import random
import re
import sys
import threading
import time
import timeit
import traceback
import webbrowser
import _thread
from datetime import datetime

import names
import requests
from flask import Flask, render_template, request, redirect
# Shared FIFO of solved captcha tokens; newest appended last.
tokens = []


def captureToken(token):
    """Store *token* with a ~115-second expiry (recaptcha tokens are short-lived)."""
    expiry = datetime.now().timestamp() + 115
    tokenDict = {
        'expiry': expiry,
        'token': token
    }
    tokens.append(tokenDict)
    return
def sendToken():
    """Block until a token is available, then pop and return the oldest one."""
    while not tokens:
        # Sleep briefly instead of spinning: the original `pass` busy-wait
        # pegged a CPU core while waiting for the Flask thread to capture one.
        time.sleep(0.05)
    token = tokens.pop(0)
    return token['token']
def manageTokens():
    """Background reaper: drop expired tokens every 5 seconds."""
    while True:
        # Iterate over a snapshot: removing from a list while iterating the
        # live list (as the original did) silently skips elements.
        for item in list(tokens):
            if item['expiry'] < datetime.now().timestamp():
                tokens.remove(item)
        time.sleep(5)
app = Flask(__name__)

# Silence werkzeug's per-request logging.
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)


# NOTE(review): the route markers were written as '#app.route' — as comments
# they never register the handlers, so every URL 404s. They must be
# '@app.route' decorators, restored below.
@app.route('/', methods=['GET'])
def base():
    return redirect("http://Thrillofit.baller.com:5000/solve", code=302)


@app.route('/solve', methods=['GET'])
def solve():
    sitekey = "6LdyFRkUAAAAAF2YmQ9baZ6ytpVnbVSAymVpTXKi"
    return render_template('index.html', sitekey=sitekey)


@app.route('/submit', methods=['POST'])
def submit():
    # Empty string when the captcha response field is missing from the form.
    token = request.form.get('g-recaptcha-response', '')
    captureToken(token)
    return redirect("http://Thrillofit.baller.com:5000/solve", code=302)
def info(thread):
    """Per-process entry point: record a 1-based worker id, then run passwd()."""
    global prod
    prod = int(thread) + 1
    passwd()


def passwd():
    """Wait for a solved captcha token and print it."""
    lilcapttoken = sendToken()
    # Fixed typo: the original printed `lilcaptoken` (missing a 't'), which
    # raised NameError inside the worker process and killed it silently —
    # the likely reason nothing ever printed after solving.
    print(lilcapttoken)
def main():
    """Prompt for a worker count, spawn that many processes, wait, exit."""
    webbrowser.open('http://Thrillofit.baller.com:5000/solve')
    user_input = 0
    while True:
        try:
            # NOTE(review): `Fore` comes from colorama, which is never imported
            # in this file — add `from colorama import Fore` or drop the colors.
            user_input = int(input(Fore.WHITE + 'How many tasks do you wanna run? [NUMBERS] \n' + Fore.RESET))
        except ValueError:
            print(Fore.RED + "Stop being stupid" + Fore.RESET)
            continue
        else:
            HowManyThread = user_input
            print()
            jobs = []
            for i in range(HowManyThread):
                p = multiprocessing.Process(target=info, args=(str(i),))
                jobs.append(p)
                time.sleep(.5)
                p.start()
            for p in jobs:
                p.join()
            sys.exit()


if __name__ == '__main__':
    try:
        # Run the Flask app and the token reaper on background threads, then
        # hand the foreground over to the interactive prompt.
        _thread.start_new_thread(app.run, ())
        _thread.start_new_thread(manageTokens, ())
        main()
    except Exception as e:
        print(e)
        # traceback.print_exc() already writes to stderr; the original wrapped
        # it in print(), which just printed its None return value (and then
        # printed the traceback module object itself).
        traceback.print_exc()
I'm writing a small, single function that is designed to request user input with a time delay. When the time delay runs out, the function should return None instead of a user's response and then should continue with the rest of the script.
In the current implementation, the user input works and the timeout works, with the timeout message printed by a signal handler function which is defined within the function (I'm aiming to have this outer function fairly self-contained). However, processing then halts (rather than exiting the while loop defined in the main function) and I'm not sure why.
How can I get processing to continue? Am I misusing signal in some way? Could a lambda be used in place of an explicitly-defined function for the handler function?
#!/usr/bin/env python
from __future__ import print_function
import signal
import propyte
def main():
    response = "yes"
    # Keep prompting until a prompt times out (returns None).
    while response is not None:
        response = get_input_nonblocking(
            prompt="ohai? ",
            timeout=5
        )
    print("start non-response procedures")
    # do things


def get_input_nonblocking(
    prompt="",
    timeout=5,
    message_timeout="prompt timeout"
):
    """Prompt for input, intending to return None after *timeout* seconds.

    NOTE(review): the alarm handler below only prints — it does not raise, so
    the blocking read is never interrupted and execution appears to halt.
    Raising an exception from the handler (see the answer that follows) is
    what actually unblocks the read.
    """
    def timeout_manager(signum, frame):
        print(message_timeout)

    # signal.signal(signal.SIGALRM, lambda: print(message_timeout))
    signal.signal(signal.SIGALRM, timeout_manager)
    signal.alarm(timeout)
    try:
        response = propyte.get_input(prompt)
        return response
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C still exits the loop.
        return None


if __name__ == '__main__':
    main()
What you've got is almost there, but you need to raise an exception inside your signal handler. raw_input will block until something happens, either input or an exception. If you raise an exception in the signal handler, that will then interrupt raw_input and execution will fall into the except in your get_input_non_blocking function. Here's a toy example.
import signal


def timeout(signum, frame):
    # Raising here interrupts the blocking read in read_input(); execution
    # then falls into its except clause. (IOError is an alias of OSError on
    # Python 3.)
    raise IOError("bye!")


signal.signal(signal.SIGALRM, timeout)


def read_input():
    """Prompt once; return the line read, or None if the alarm fired first.

    Renamed from the original `input`, which shadowed the builtin and called
    Python 2's raw_input (a NameError on Python 3).
    """
    try:
        print("omgplz: ")
        return input()
    except IOError:
        return None


if __name__ == "__main__":
    signal.alarm(5)
    txt = read_input()
    signal.alarm(0)
    print(txt)
There's some more discussion and an alternative approach using select in this answer here: Keyboard input with timeout in Python
Hope that helps!
I have yet another question about Python multiprocessing.
I have a module that creates a Process and just runs in a while True loop.
This module is meant to be enabled/disabled from another Python module.
That other module will import the first one once and is also run as a process.
How would I better implement this?
so for a reference:
# foo.py
from multiprocessing import Process


def foo():
    # NOTE(review): `enabled` is never defined in this snippet — presumably a
    # module-level flag toggled by foo.enable()/foo.disable(); confirm.
    while True:
        if enabled:
            # do something
            pass


# Guarded: the original started the process at import time, which would also
# fire when bar.py does `import foo`.
if __name__ == "__main__":
    p = Process(target=foo)
    p.start()
and imagine second module to be something like that:
# bar.py
import time
from multiprocessing import Process

import foo


def bar():
    # Toggle foo's worker every 10 seconds.
    while True:
        foo.enable()
        time.sleep(10)
        foo.disable()


if __name__ == "__main__":
    Process(target=bar).start()
Constantly running a process checking for condition inside a loop seems like a waste, but I would gladly accept the solution that just lets me set the enabled value from outside.
Ideally I would prefer to be able to terminate and restart the process, again from outside of this module.
From my understanding, I would use a Queue to pass commands to the Process. If it is indeed just that, can someone show me how to set it up in a way that I can add something to the queue from a different module.
Can this even be easily done with Python or is it time to abandon hope and switch to something like C or Java
I proposed in a comment two different approaches:
using a shared variable from multiprocessing.Value
pause / resume the process with signals
Control by sharing a variable
import time
from multiprocessing import Process, Value


def target_process_1(run_statement):
    """Loop forever, doing work only while the shared flag is truthy."""
    while True:
        if run_statement.value:
            print("I'm running !")
        time.sleep(1)


def target_process_2(run_statement):
    """Pause the worker after 3 seconds, then resume it 3 seconds later."""
    time.sleep(3)
    print("Stoping")
    run_statement.value = False
    time.sleep(3)
    print("Resuming")
    run_statement.value = True


if __name__ == "__main__":
    # "i" = a shared C int; 1/0 stand in for True/False across processes.
    run_statement = Value("i", 1)
    process_1 = Process(target=target_process_1, args=(run_statement,))
    process_2 = Process(target=target_process_2, args=(run_statement,))
    process_1.start()
    process_2.start()
    time.sleep(8)
    process_1.terminate()
    process_2.terminate()
Control by sending a signal
from multiprocessing import Process
import time
import os
import signal


def target_process_1():
    """Print once a second until paused or terminated externally."""
    while True:
        print("Running !")
        time.sleep(1)


def target_process_2(target_pid):
    """Freeze the target process with SIGSTOP, then thaw it with SIGCONT."""
    time.sleep(3)
    os.kill(target_pid, signal.SIGSTOP)
    time.sleep(3)
    os.kill(target_pid, signal.SIGCONT)


if __name__ == "__main__":
    process_1 = Process(target=target_process_1)
    process_1.start()
    process_2 = Process(target=target_process_2, args=(process_1.pid,))
    process_2.start()
    time.sleep(8)
    process_1.terminate()
    process_2.terminate()
Side note: if possible do not run a while True.
EDIT: if you want to manage your process in two different files, supposing you want to use a control by sharing a variable, this is a way to do.
# file foo.py
from multiprocessing import Value, Process
import time

# Public API matches what this module actually defines; the original
# advertised 'pause'/'resume' but implemented enable()/disable().
__all__ = ['start', 'stop', 'enable', 'disable']

# Module-level handles to the worker process and its shared run flag.
_statement = None
_process = None


def _target(run_statement):
    """ Target of the foo's process """
    while True:
        if run_statement.value:
            print("I'm running !")
        time.sleep(1)


def start():
    """Spawn the worker process with its shared run flag enabled."""
    global _process, _statement
    _statement = Value("i", 1)
    _process = Process(target=_target, args=(_statement,))
    _process.start()


def stop():
    """Terminate the worker and clear the module-level handles."""
    global _process, _statement
    _process.terminate()
    # The original read `_statement, _process = None, _process`, which kept a
    # dead Process object alive; both handles should be reset.
    _statement, _process = None, None


def enable():
    """Let the worker loop do its work (sets the shared flag truthy)."""
    _statement.value = True


def disable():
    """Pause the worker loop without terminating the process."""
    _statement.value = False
I have a python program that I have written. This python program calls a function within a module I have also written and passes it some data.
program:
def Response(Response):
    # NOTE(review): this assigns to a local name, so the passed-in value is
    # discarded when the function returns — nothing is shared with main().
    Resp = Response


def main():
    myModule.process_this("hello")  # Send string to myModule Process_this function
    # Should wait around here for Resp to contain the Response
    # NOTE(review): `Resp` is not defined in this scope; as written this line
    # raises NameError. See the queue-based answer below for the fix.
    print(Resp)
That function processes it and passes it back as a response to function Response in the main program.
myModule:
def process_this(data):  # fixed: the original def line was missing its colon
    # process data
    program.Response(data)
I checked and all the data is being passed correctly. I have left out all the imports and the data processing to keep this question as concise as possible.
I need to find some way of having Python wait for Resp to actually contain the response before proceeding with the program. I've been looking at threading with semaphores, or the Queue module, but I'm not 100% sure how I would incorporate either into my program.
Here's a working solution with queues and the threading module. Note: if your tasks are CPU bound rather than IO bound, you should use multiprocessing instead
import threading
import queue  # Python 3 name of the Python 2 `Queue` module


def worker(in_q, out_q):
    """ threadsafe worker """
    abort = False
    while not abort:
        try:
            # make sure we don't wait forever
            task = in_q.get(True, .5)
        except queue.Empty:
            abort = True
        else:
            # process task
            response = task
            # return result
            out_q.put(response)
            in_q.task_done()


# Guarded so importing this module doesn't start the demo thread.
if __name__ == "__main__":
    # one queue to pass tasks, one to get results
    task_q = queue.Queue()
    result_q = queue.Queue()
    # start threads
    t = threading.Thread(target=worker, args=(task_q, result_q))
    t.start()
    # submit some work
    task_q.put("hello")
    # wait for results
    task_q.join()
    print("result", result_q.get())