Multithreaded application in Python Flask

I would like to ask some questions about multithreading in Python Flask. Basically I have three files: the first is the main program, the second is a thread, and the last is the Flask webserver. What I want to achieve is to be able to control both "threads" from main. The AntBMS thread (second file) should be able to update the values in the webserver. What I see now is that on connection the webserver creates a new thread, which I don't think is the right behaviour. At the very least, when the application terminates it has to close all threads correctly.
# app.py
from flask import Flask, render_template
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app, async_mode=None)

@app.route('/')
def index():
    return render_template('index.html')

@socketio.on('connect', namespace='/test')
def test_connect():
    pass
# bms.py
from app import socketio
import configparser
import logging
import threading
import time
from random import random

BMS_CFG = 'bms.ini'

# set global logger
LOGGER = logging.getLogger(__name__)

config = configparser.ConfigParser()
# set default options
config.read_dict(
    {
        'CORE': {
            'main_sleep_time': 5,
            'read_sleep_time': 5
        },
        'LOGGER': {
            'severity': logging.INFO
        },
        'WEBSERVER': {
            'host': '0.0.0.0',
            'port': 80,
            'debug': False
        }
    }
)
config.read(BMS_CFG)

def logger_init(log_level: int):
    """Function to init logger"""
    LOGGER.setLevel(logging.DEBUG)
    stream = logging.StreamHandler()
    stream.setLevel(log_level)
    fmt = logging.Formatter('[%(levelname)s][%(threadName)s] %(message)s')
    stream.setFormatter(fmt)
    LOGGER.addHandler(stream)

logger_init(config.get('LOGGER', 'severity'))

class AntBMS():
    def __init__(self):
        LOGGER.debug("init")

    def run(self):
        LOGGER.info("Starting updating thread")
        try:
            while True:
                # just a random data for now
                t = round(random()*10, 3)
                socketio.emit('message', {'data': t}, namespace='/test')
                LOGGER.debug("Going to sleep for {}s".format(config.getint('CORE', 'main_sleep_time')))
                socketio.sleep(config.getint('CORE', 'main_sleep_time'))
        except Exception as ex:
            print("error: " + str(ex))
# main.py
from app import app, socketio
from bms import AntBMS, config, LOGGER
import threading

if __name__ == '__main__':
    print("start")

    ant = AntBMS()
    bms_thread = threading.Thread(target=ant.run, name="bms")
    bms_thread.start()

    socketio.run(app, host="0.0.0.0", port=80, debug=False, use_reloader=False)

    LOGGER.info("Stop")
    print("stop")
It works fine until a client connects :). As I've written, I then see a new thread ID, but I'd expect the thread to always be the same. On the other hand, it didn't correctly join the AntBMS thread either.
Press CTRL+C to quit
[DEBUG][bms] Going to sleep for 5s
[DEBUG][bms] Going to sleep for 5s
^C[INFO][MainThread] Stop
stop
^CException ignored in: <module 'threading' from ...
in _shutdown
lock.acquire()
KeyboardInterrupt:
So how do I do this correctly?
Thank you
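For what it's worth, here is a minimal sketch of one way the lifecycle could be handled (an illustration, not a definitive answer, and assuming the threading async mode is what Flask-SocketIO ends up using): start the updater with socketio.start_background_task so it uses the same async model as the server, and hand it a threading.Event so main can ask it to stop and join it. The stop_event parameter and the loop condition are assumptions added on top of the code above.

# bms.py (sketch): run() takes a stop event and checks it each iteration
def run(self, stop_event):
    LOGGER.info("Starting updating thread")
    while not stop_event.is_set():
        t = round(random() * 10, 3)
        socketio.emit('message', {'data': t}, namespace='/test')
        socketio.sleep(config.getint('CORE', 'main_sleep_time'))

# main.py (sketch)
import threading
from app import app, socketio
from bms import AntBMS, LOGGER

if __name__ == '__main__':
    stop_event = threading.Event()
    ant = AntBMS()
    # start_background_task returns a Thread-compatible object that is already started
    task = socketio.start_background_task(ant.run, stop_event)
    try:
        socketio.run(app, host="0.0.0.0", port=80, use_reloader=False)
    finally:
        stop_event.set()   # ask the updater loop to finish its current iteration
        task.join()        # wait for it before the process exits
        LOGGER.info("Stop")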

Related

Flask-socketio not emitting data from class methods and registered signal slots

I am instantiating the Flask app from the main class of my project. From one of the modules inside the project I have created a signal to emit data, using the signalslot Python library. The FlaskApp class is responsible for the Flask server, and in the same class the signal is connected to a slot that publishes the data to the connected websocket.
For signalslot I am using this library: https://pypi.org/project/signalslot/
signals.py
import signalslot

on_heartbeat_data_received_signal = signalslot.Signal()
message.py
from signals import on_heartbeat_data_received_signal

on_heartbeat_data_received_signal.emit(data="hello")
main.py
class Main():
    def __init__(self):
        flaskApp()

if __name__ == "__main__":
    main = Main()
flaskApp.py
from flask import Flask
from flask_socketio import SocketIO, emit
from flask_cors import CORS
import yaml

from common.signals import on_heartbeat_data_received_signal

socketio = SocketIO()

class FlaskApp():
    def __init__(self, config_path):
        self.logger = Logger.get_instance("FlaskApp")
        self.logger.debug("started")
        # Create an application
        app = Flask(__name__)
        app.config['SECRET_KEY'] = 'gjr39dkjn344_!67#'  # todo read from yaml
        socketio.init_app(app, async_mode="threading", cors_allowed_origins="*")
        CORS(app)
        # load yml to read flask configuration from config path
        with open(config_path, "r") as yamlStream:
            try:
                self.flask_config = yaml.safe_load(yamlStream)["flask"]
            except yaml.YAMLError as exc:
                print("replace to logger")
        self.logger.info(self.flask_config["host"])
        self.logger.info(self.flask_config["port"])
        self.logger.debug("connect to signal to send to UI..")
        on_heartbeat_data_received_signal.connect(self.publish_data)
        socketio.run(app, debug=True, host=self.flask_config["host"], port=self.flask_config["port"])
        self.logger.debug("flask server is up....")
        self.logger.debug("ended")

    @socketio.on("connect")
    def websocket_connect():
        print("websocket connected...")
        # socketio.emit("response", "mesg from server") # this line is working

    # registered slot to capture data from signal
    def publish_data(self, data, **kwargs):
        self.logger.debug("started publish data to connected websocket client..")
        self.logger.debug(data)
        try:
            socketio.emit("response-data", data)  # this is not emitting data
        except Exception as e:
            self.logger.error(e)

    @socketio.on("response-data")
    def handle_message(message):
        print(message)
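No answer is recorded here, but as an illustration of the pattern the question is after, here is a minimal, self-contained sketch: a module-level SocketIO instance created with async_mode="threading", a signalslot signal, and a slot that emits through that instance. All file and variable names below are illustrative, not the asker's actual project.

# sketch.py (illustrative only)
import signalslot
from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
# threading mode allows emit() to be called from ordinary Python threads/slots
socketio = SocketIO(app, async_mode="threading", cors_allowed_origins="*")

heartbeat_signal = signalslot.Signal()

def publish_data(data=None, **kwargs):
    # slot: relay whatever the signal carries to all connected clients
    socketio.emit("response-data", data)

heartbeat_signal.connect(publish_data)

@socketio.on("connect")
def on_connect():
    print("websocket connected...")

if __name__ == "__main__":
    # elsewhere in the application, once the server is running:
    # heartbeat_signal.emit(data="hello")
    socketio.run(app, host="0.0.0.0", port=5000)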

How can I update a global variable outside a thread?

I have a global variable that is shared by all of the functions in my Flask-SocketIO server, and a thread that starts another feature to send notifications to clients connected to the Flask-SocketIO server. The thread is started at launch and the global variable is passed into it. The problem is that when I change the global variable outside the thread, it is not updated in the thread.
from flask import Flask, request
from flask_socketio import SocketIO, emit
from flask_cors import CORS, cross_origin
from generate_audio import *
from manage_time import receive_data, clock
from manage_audio import set_audio
import threading

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, cors_allowed_origins="*")
CORS(app)

a = 0

@app.route("/demo", methods=["POST"])
@cross_origin(origin='localhost', headers=['Content-Type', 'Authorization'])
def receive_request():
    global a
    a += 1
    return "Done"

def clock(a):
    while True:
        print(a)

if __name__ == "__main__":
    global a
    threading.Thread(target=clock, args=(a,)).start()
    socketio.run(app, port=5000, debug=True, host="0.0.0.0")
How can I update a value inside the thread? It always equals 0, but I want it to increase every time I send a request to "/demo".
You can modify global variables inside a function like this:
global a
a += 1
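For completeness, the reason the thread always prints 0 is that args=(a,) binds the thread's parameter to the integer value at start time; rebinding the global later does not affect that local name. One common fix, sketched here with the Flask parts omitted, is to let the thread read the module-level variable itself (or share a mutable object) instead of taking the value as a parameter:

import threading
import time

a = 0

def clock():
    # read the module-level name on every iteration instead of a copied parameter
    while True:
        print(a)
        time.sleep(1)

def receive_request():
    global a   # needed because the name is rebound
    a += 1
    return "Done"

threading.Thread(target=clock, daemon=True).start()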

Passing data received from a json request to another function in python flask socketio

I have a Python Flask app which receives JSON data. I have also used SocketIO and threading in order to process data in real time.
In my program I need to send the data that I receive from JSON requests to another Python function.
Below is the code that I wrote to do this:
from flask_socketio import SocketIO, emit
from flask import Flask, render_template, request, url_for, copy_current_request_context
from random import random
from time import sleep
from pygeodesy.ellipsoidalVincenty import LatLon
from threading import Thread, Event

__author__ = 'shark'

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
app.config['DEBUG'] = True

# turn the flask app into a socketio app
socketio = SocketIO(app, async_mode=None, logger=True, engineio_logger=True)

# random number Generator Thread
thread = Thread()
thread_stop_event = Event()

@app.route('/platform-data', methods=['POST'])
def platformData():
    """
    Generate a random number every 1 second and emit to a socketio instance (broadcast)
    Ideally to be run in a separate thread?
    """
    # infinite loop of magical random numbers
    print("Receiving platform data")
    while not thread_stop_event.isSet():
        req_data = request.get_json()
        id = req_data['id']
        latitude = req_data['coordinates'][1]
        longitude = req_data['coordinates'][0]
        speed = req_data['speed']
        angle = req_data['angle']
        length = req_data['dimensions'][0]
        width = req_data['dimensions'][1]
        laneW = req_data['lane_width']
        spdLmt = req_data['speed_limit']
        return testProcess(speed)

def testProcess(speed):
    if speed > 30:
        print("slow down")
        socketio.emit('speed', {'speed': speed}, namespace='/test')
        socketio.sleep(.5)

@app.route('/')
def index():
    # only by sending this page first will the client be connected to the socketio instance
    return render_template('index.html')

@socketio.on('connect', namespace='/test')
def test_connect():
    # need visibility of the global thread object
    global thread
    print('Client connected')
    # Start the random number generator thread only if the thread has not been started before.
    if not thread.isAlive():
        print("Starting Thread")
        thread = socketio.start_background_task(platformData)

@socketio.on('disconnect', namespace='/test')
def test_disconnect():
    print('Client disconnected')

if __name__ == '__main__':
    socketio.run(app)
However, when I run the app and POST data from Postman, I get the below error in my console:
TypeError: The view function did not return a valid response. The
return type must be a string, dict, tuple, Response instance, or WSGI
callable, but it was a int.
127.0.0.1 - - [05/Mar/2020 17:06:25] "POST /platform-data HTTP/1.1" 500 15625 0.008975
I know the reason for this is that I have declared return testProcess(speed).
Therefore, I need to know the correct way to pass the speed variable to the testProcess function.
platformData() must return a string, dict, tuple, Response instance, or WSGI callable, but you return the result of testProcess, which does not return anything.
Try:
def testProcess(speed):
    if speed > 30:
        return "slow down"
    else:
        return "ok"
Or just call testProcess(speed) without the return, and then return something else.
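A minimal sketch of that second option, rewriting the route handler so it calls testProcess for its side effects and returns a plain string so Flask gets a valid response (the other fields are omitted here for brevity):

@app.route('/platform-data', methods=['POST'])
def platformData():
    req_data = request.get_json()
    speed = req_data['speed']
    testProcess(speed)   # emit / print as a side effect, ignore its return value
    return "OK"          # a string is a valid Flask response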

Flask+gevent: logging events from within the Process instance not handled

I want to implement Server-Sent Events in my application. I'm using Flask with a gevent backend. I mostly follow the official snippet, with two important distinctions:
The function that fires events is spawned as a separate process (an instance of multiprocessing.Process) instead of with gevent.spawn. It sends the "inner" event.
Event firing is done via logging, with QueueHandler from logutils.
Here's a minimal example:
from flask import Flask, Response
import time
import logging
from gevent.wsgi import WSGIServer
from gevent.queue import Queue
import multiprocessing as mp
from logutils.queue import QueueHandler
app = Flask(__name__)
logger = logging.getLogger('events')
logger.setLevel(logging.INFO)
#app.route("/publish")
def publish():
logger = logging.getLogger('events')
def notify():
time.sleep(1)
logger.info('inner')
logger.info('outer')
p = mp.Process(target=notify)
p.start()
return "OK"
#app.route("/subscribe")
def subscribe():
def gen():
logger = logging.getLogger("events")
q = Queue()
handler = QueueHandler(q)
logger.addHandler(handler)
while True:
result = q.get().message
yield result
return Response(gen(), mimetype="text/event-stream")
if __name__ == "__main__":
root = logging.getLogger()
root.addHandler(logging.StreamHandler())
root.setLevel(logging.INFO)
app.debug = True
server = WSGIServer(("", 5000), app)
server.serve_forever()
The problem is, StreamHandler catches both the "inner" and "outer" events and logs them to stdout. QueueHandler, on the other hand, only pushes the "outer" event to the queue, so it fails to catch the "inner" event, which originates in the other process.
Now, if I use multiprocessing.Queue instead of gevent.queue, the "/subscribe" route blocks. If I use the standard Werkzeug server (with threading) in conjunction with multiprocessing.Queue, the code above works as expected.
What's going on, and how can I tackle this?
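No answer is reproduced here, but for illustration: the "inner" record is logged in a separate child process, which only holds its own copies of the parent's logger and gevent queue, so nothing it logs can reach the subscriber's queue in the parent. A minimal sketch of the in-process variant the official snippet uses (gevent.spawn instead of multiprocessing.Process), which keeps the logger, queue and notifier in one process:

import gevent

@app.route("/publish")
def publish():
    logger = logging.getLogger('events')

    def notify():
        gevent.sleep(1)       # cooperative sleep so the server keeps serving
        logger.info('inner')  # same process, so the subscriber's QueueHandler sees it

    logger.info('outer')
    gevent.spawn(notify)      # greenlet instead of a separate OS process
    return "OK"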

Run a function once on bottle.py startup

I have a bottle app that I eventually want to deploy on Apache (just FYI in case that's important).
Now I need to run a function once after the bottle app is started. I can't just put it into a routed function because it has to run even if no user has accessed the site yet.
Any best practice for doing this?
The function starts an APScheduler instance and adds a jobstore to it.
Here's what I do.
import bottle

def initialize():
    # init whatever you need
    ...

if __name__ == '__main__':
    initialize()
    bottle.run(port=8080)  # yatta yatta
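Since the question says the function starts an APScheduler instance and adds a jobstore, here is a minimal sketch of what initialize() might contain; the SQLAlchemy jobstore URL and the choice of BackgroundScheduler are assumptions, not from the question:

from apscheduler.schedulers.background import BackgroundScheduler

def initialize():
    scheduler = BackgroundScheduler()
    # placeholder jobstore; any APScheduler jobstore can be added the same way
    scheduler.add_jobstore('sqlalchemy', url='sqlite:///jobs.sqlite')
    scheduler.start()   # runs in its own background thread
    return scheduler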
Honestly your problem is simply a sync vs async issue. Use gevent to easily convert to microthreads, and then launch each separately. You can even add a delay either in your function or before with gevent.sleep if you want to wait for the web server to finish launching.
import gevent
from gevent import monkey, signal, spawn, joinall
monkey.patch_all()
from gevent.pywsgi import WSGIServer
from bottle import Bottle, get, post, request, response, template, redirect, hook, abort
import bottle

@get('/')
def mainindex():
    return "Hello World"

def apScheduler():
    print("AFTER SERVER START")

if __name__ == "__main__":
    botapp = bottle.app()
    server = WSGIServer(("0.0.0.0", 80), botapp)
    threads = []
    threads.append(spawn(server.serve_forever))
    threads.append(spawn(apScheduler))
    joinall(threads)
Create an APScheduler class.
Look at examples of object use and creation on this same site, because it's too general to give a specific example to copy.
I don't know if this helps.
class Shed(object):
    def __init__(self):  # this to start it
        # instructions here
        pass

    def Newshed(self, data):
        # call from bottle
        pass

    # more methods ...

...
# init
aps = Shed()  # this activates Shed.__init__()
...
# in the @route handler
x = aps.Newshed(data)  # or whatever
import threading
import bottle

def init_app():
    def my_function_on_startup():
        # some code here
        pass

    app = bottle.app()
    t = threading.Thread(target=my_function_on_startup)
    t.daemon = True
    t.start()
    return app

app = init_app()

@app.route("/")
def hello():
    return "App is running"

if __name__ == "__main__":
    bottle.run(app, host='localhost', port=8080, debug=True)
