Can't access Wifi-Cam from OpenCV-Python

I'm using a WiFi cam called ESP32-CAM and trying to connect to it with OpenCV. The Node server.js code makes it possible to connect to the module by its IP address. The code for server.js is given below:
SERVER.JS:
const path = require('path');
const express = require('express');
const WebSocket = require('ws');
const app = express();
const WS_PORT = 8888;
const HTTP_PORT = 8000;
const wsServer = new WebSocket.Server({ port: WS_PORT }, () => console.log(`WS Server is listening at ${WS_PORT}`));

let connectedClients = [];
wsServer.on('connection', (ws, req) => {
    console.log('Connected');
    connectedClients.push(ws);
    ws.on('message', data => {
        connectedClients.forEach((ws, i) => {
            if (ws.readyState === ws.OPEN) {
                ws.send(data);
            } else {
                connectedClients.splice(i, 1);
            }
        });
    });
});

app.get('/client', (req, res) => res.sendFile(path.resolve(__dirname, './client.html')));
app.listen(HTTP_PORT, () => console.log(`HTTP server listening at ${HTTP_PORT}`));
CLIENT.HTML:
<html>
  <head>
    <title>Client</title>
  </head>
  <body>
    <img src="">
    <script>
      const img = document.querySelector('img');
      const WS_URL = 'ws://192.168.1.33:8888';
      const ws = new WebSocket(WS_URL);
      let urlObject;
      ws.onopen = () => console.log(`Connected to ${WS_URL}`);
      ws.onmessage = message => {
        const arrayBuffer = message.data;
        if (urlObject) {
          URL.revokeObjectURL(urlObject);
        }
        urlObject = URL.createObjectURL(new Blob([arrayBuffer]));
        img.src = urlObject;
      };
    </script>
  </body>
</html>
I can access the streaming video at "http://192.168.1.33:8000/client" with my browser, but not with OpenCV. Here is the Python/OpenCV code trying to reach the camera.
OPENCV CODE:
cap = cv2.VideoCapture('http://192.168.1.33:8000/client')
print(cap.isOpened())  # prints False

while True:
    ret, frame = cap.read()
    cv2.imshow('frame', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        cv2.destroyAllWindows()
        break
I don't understand why I can't access the camera, since this ESP32 module should behave just like an IP camera. I would appreciate any help.
Thank you.
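Note that cv2.VideoCapture cannot open http://192.168.1.33:8000/client: that URL returns an HTML page, not a video stream, and OpenCV expects something like MJPEG over HTTP or RTSP. The JPEG frames actually travel over the WebSocket on port 8888, so one option is to read them from there directly. A minimal sketch, assuming the websocket-client package (pip install websocket-client) and that every WebSocket message carries one complete JPEG frame:

import cv2
import numpy as np
import websocket  # websocket-client package (assumed installed)

ws = websocket.create_connection('ws://192.168.1.33:8888')
try:
    while True:
        data = ws.recv()              # one JPEG frame per message (assumed)
        if isinstance(data, str):     # ignore any text messages
            continue
        frame = cv2.imdecode(np.frombuffer(data, np.uint8), cv2.IMREAD_COLOR)
        if frame is None:             # skip messages that are not valid JPEG
            continue
        cv2.imshow('frame', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    ws.close()
    cv2.destroyAllWindows()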

Related

Flask Live-Stream Not working with host attribute

I want to classify a live video stream coming from the user's browser, using OpenCV behind a Flask server. After a lot of searching I found some code and put together the following:
from flask import Flask, render_template
from flask_socketio import SocketIO, emit
import time
import io
from PIL import Image
import base64, cv2
import numpy as np
# import pyshine as ps
from flask_cors import CORS, cross_origin
import imutils
# import dlib
from engineio.payload import Payload

# detector = dlib.get_frontal_face_detector()
# predictor = dlib.shape_predictor("shape_predictor_68_face_landmarks.dat")

Payload.max_decode_packets = 2048

app = Flask(__name__)
socketio = SocketIO(app, cors_allowed_origins='*')
CORS(app)

@app.route('/', methods=['POST', 'GET'])
def index():
    return render_template('index.html')

def readb64(base64_string):
    idx = base64_string.find('base64,')
    base64_string = base64_string[idx + 7:]
    sbuf = io.BytesIO()
    sbuf.write(base64.b64decode(base64_string, ' /'))
    pimg = Image.open(sbuf)
    return cv2.cvtColor(np.array(pimg), cv2.COLOR_RGB2BGR)

def moving_average(x):
    return np.mean(x)

@socketio.on('catch-frame')
def catch_frame(data):
    emit('response_back', data)

global fps, prev_recv_time, cnt, fps_array
fps = 30
prev_recv_time = 0
cnt = 0
fps_array = [0]

@socketio.on('image')
def image(data_image):
    global fps, cnt, prev_recv_time, fps_array
    recv_time = time.time()
    text = 'FPS: ' + str(fps)
    frame = readb64(data_image)
    # frame = changeLipstick(frame, [255, 0, 0])
    # frame = ps.putBText(frame, text, text_offset_x=20, text_offset_y=30, vspace=20, hspace=10, font_scale=1.0, background_RGB=(10, 20, 222), text_RGB=(255, 255, 255))
    frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    imgencode = cv2.imencode('.jpeg', frame, [cv2.IMWRITE_JPEG_QUALITY, 40])[1]

    # base64 encode
    stringData = base64.b64encode(imgencode).decode('utf-8')
    b64_src = 'data:image/jpeg;base64,'
    stringData = b64_src + stringData

    # emit the frame back
    emit('response_back', stringData)

    fps = 1 / (recv_time - prev_recv_time)
    fps_array.append(fps)
    fps = round(moving_average(np.array(fps_array)), 1)
    prev_recv_time = recv_time
    # print(fps_array)
    cnt += 1
    if cnt == 30:
        fps_array = [fps]
        cnt = 0

def getMaskOfLips(img, points):
    """ This function will input the lips points and the image
    It will return the mask of the lips region containing white pixels
    """
    mask = np.zeros_like(img)
    mask = cv2.fillPoly(mask, [points], (255, 255, 255))
    return mask

def changeLipstick(img, value):
    """ This function will take an image and a lipstick color RGB
    and output the image with a changed lip color
    """
    img = cv2.resize(img, (0, 0), None, 1, 1)
    imgOriginal = img.copy()
    imgColorLips = imgOriginal
    imgGray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = detector(imgGray)
    for face in faces:
        x1, y1 = face.left(), face.top()
        x2, y2 = face.right(), face.bottom()
        facial_landmarks = predictor(imgGray, face)
        points = []
        for i in range(68):
            x = facial_landmarks.part(i).x
            y = facial_landmarks.part(i).y
            points.append([x, y])

        points = np.array(points)
        imgLips = getMaskOfLips(img, points[48:61])
        imgColorLips = np.zeros_like(imgLips)
        imgColorLips[:] = value[2], value[1], value[0]
        imgColorLips = cv2.bitwise_and(imgLips, imgColorLips)

        value = 1
        value = value // 10
        if value % 2 == 0:
            value += 1
        kernel_size = (6 + value, 6 + value)  # +1 is to avoid 0

        weight = 1
        weight = 0.4 + (weight) / 400
        imgColorLips = cv2.GaussianBlur(imgColorLips, kernel_size, 10)
        imgColorLips = cv2.addWeighted(imgOriginal, 1, imgColorLips, weight, 0)
    return imgColorLips

if __name__ == '__main__':
    socketio.run(app, port=9990, debug=True)
This is main.py of the Flask app. In the templates folder I have made index.html with:
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Title</title>
    <style>
        #video {
            transform: rotateY(180deg);
            -webkit-transform: rotateY(180deg); /* Safari and Chrome */
            -moz-transform: rotateY(180deg); /* Firefox */
        }
    </style>
    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
    <script src='https://cdnjs.cloudflare.com/ajax/libs/socket.io/2.0.0/socket.io.js'></script>
</head>
<body>
    <div id="container">
        <video autoplay playsinline id="videoElement"></video>
        <canvas id="canvas" width="400" height="300"></canvas>
    </div>
    <div class='video'>
        <img id="photo" width="400" height="300">
        <h1>video</h1>
    </div>
    <script type="text/javascript" charset="utf-8">
        var socket = io.connect(window.location.protocol + '//' + document.domain + ':' + location.port);
        socket.on('connect', function () {
            console.log("Connected...!", socket.connected)
        });
        var canvas = document.getElementById('canvas');
        var context = canvas.getContext('2d');
        const video = document.querySelector("#videoElement");
        video.width = 400;
        video.height = 300;
        if (navigator.mediaDevices.getUserMedia) {
            navigator.mediaDevices.getUserMedia({ video: true })
                .then(function (stream) {
                    video.srcObject = stream;
                    video.play();
                })
                .catch(function (err0r) {
                });
        }
        const FPS = 6;
        setInterval(() => {
            width = video.width;
            height = video.height;
            context.drawImage(video, 0, 0, width, height);
            var data = canvas.toDataURL('image/jpeg', 0.5);
            context.clearRect(0, 0, width, height);
            socket.emit('image', data);
        }, 1000 / FPS);
        socket.on('response_back', function (image) {
            photo.setAttribute('src', image);
        });
    </script>
</body>
</html>
The problem is: I need to make sure it works on the user's device, but when I add host='0.0.0.0' to socketio.run(), the page loses access to my webcam.
How can I solve this?
I am also unable to show the video back.
The error in the console:
polling-xhr.js:264 GET http://localhost:9990/socket.io/?EIO=3&transport=polling&t=N_GnQuZ 400 (BAD REQUEST)
Hey,
first: you are using a very old Socket.IO client version, and that is the reason for the 400 error. Use a newer client, e.g.:
https://cdn.socket.io/4.4.1/socket.io.min.js
If you open the preview of the request in Chrome DevTools, you will notice an "unsupported version" message.
Why don't you see the webcam? Because there is no secure context. Once you move away from localhost (e.g. to 0.0.0.0), Chromium-based browsers require HTTPS; otherwise navigator.mediaDevices.getUserMedia is undefined.
See this post.
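For the HTTPS part, a minimal sketch, under the assumption that Flask-SocketIO forwards extra keyword arguments to the underlying Werkzeug development server and that the pyopenssl package is installed for the ad-hoc certificate:

if __name__ == '__main__':
    # Assumption: ssl_context is forwarded to Werkzeug's run_simple.
    # 'adhoc' generates a self-signed certificate (requires pyopenssl);
    # the browser will warn, but getUserMedia becomes available off-localhost.
    socketio.run(app, host='0.0.0.0', port=9990, debug=True,
                 ssl_context='adhoc')  # or ssl_context=('cert.pem', 'key.pem')

With eventlet or gevent as the server, certfile and keyfile arguments serve the same purpose.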

Black image is received after being sent over socket from android client

I have a TCP connection between my Android app and a desktop PC, over which I want to send the ImageView's image. The image is apparently sent successfully, as the file has 9.7 KiB; however, when I try to visualize it I get a black image, and no apparent errors are thrown in the Android Studio IDE.
The android app that sends an image:
private ImageView mImageView;
mImageView = (ImageView) findViewById(R.id.frame_image);

private final OnClickListener mOnClickListener = new OnClickListener() {
    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.button_camera:
                if (!Check.isFastClick()) {
                    return;
                }
                if (mCameraHandler != null) {
                    if (mCameraHandler.isOpened()) {
                        if (checkPermissionWriteExternalStorage()) {
                            Drawable drawable = mImageView.getDrawable();
                            Bitmap bitmap = getBitmapFromDrawable(drawable);
                            ByteArrayOutputStream baos = new ByteArrayOutputStream();
                            bitmap.compress(Bitmap.CompressFormat.PNG, 0, baos);
                            byte[] array = baos.toByteArray();
                            SendImageClient sendImageClient = new SendImageClient();
                            sendImageClient.execute(array);
                        }
                    }
                }
                break;
        }
    }
};

public Bitmap getBitmapFromDrawable(Drawable drawable) {
    Bitmap bitmap = Bitmap.createBitmap(drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight(), Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(bitmap);
    canvas.drawColor(Color.WHITE);
    drawable.draw(canvas);
    return bitmap;
}

public class SendImageClient extends AsyncTask<byte[], Void, Void> {
    @Override
    protected Void doInBackground(byte[]... voids) {
        try {
            Socket socket = new Socket("192.168.0.14", 9999);
            OutputStream out = socket.getOutputStream();
            DataOutputStream dataOutputStream = new DataOutputStream(out);
            dataOutputStream.write(voids[0], 0, voids[0].length);
            dataOutputStream.close();
            out.close();
            socket.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
}
activity_main.xml
<com.serenegiant.widget.UVCCameraTextureView
    android:id="@+id/camera_view"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:layout_centerHorizontal="true"
    android:layout_centerVertical="true"
    android:layout_toRightOf="@id/menu_layout" />

<com.serenegiant.widget.AutoFitTextureView
    android:id="@+id/textureView"
    android:layout_width="480px"
    android:visibility="invisible"
    android:layout_toRightOf="@id/menu_layout"
    android:layout_height="640px" />

<ImageView
    android:id="@+id/frame_image"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:layout_alignBottom="@id/camera_view"
    android:layout_alignLeft="@id/camera_view"
    android:layout_alignRight="@id/camera_view"
    android:layout_alignTop="@id/camera_view" />
Server script.py
from socket import *

port = 9999
s = socket(AF_INET, SOCK_STREAM)
s.bind(('', port))
s.listen(1)
conn, addr = s.accept()
print("Connected by the ", addr)

with open('/home/pi/Desktop/frames_saved/image.jpg', 'wb') as file:
    while True:
        data = conn.recv(1024 * 8)
        if not data:
            break
        file.write(data)
conn.close()
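A quick sanity check one can run on the Pi (an illustrative sketch, not from the original post): try decoding the received file with PIL. Note the app compresses with Bitmap.CompressFormat.PNG, so the .jpg extension is cosmetic.

from PIL import Image

# If this prints PNG with plausible dimensions, the transfer is intact and
# the black image comes from the bitmap itself, not from the socket code.
img = Image.open('/home/pi/Desktop/frames_saved/image.jpg')
print(img.format, img.size, img.mode)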
Why am I getting a black image, and how can I get the actual image being displayed in ImageView sent to the desktop?
This code only creates an empty bitmap:
Bitmap bitmap = Bitmap.createBitmap(mImageView.getWidth(), mImageView.getHeight(), Bitmap.Config.ARGB_8888);
Get the bitmap from the ImageView's drawable instead:
ImageView mImageView = findViewById(R.id.image);
Drawable drawable = mImageView.getDrawable();
Bitmap bitmap = getBitmapFromDrawable(drawable);

public Bitmap getBitmapFromDrawable(Drawable drawable) {
    Bitmap bitmap = Bitmap.createBitmap(drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight(), Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(bitmap);
    canvas.drawColor(Color.WHITE);
    drawable.draw(canvas);
    return bitmap;
}

Why is C# getting a distorted image from a python server?

I'm trying to transfer the camera stream from my Raspberry Pi to a PC as fast as possible, currently using the MJPEG method. I have a Python script on my Raspberry Pi as the server:
import io
import socket
import struct
import time
import cv2

class SplitFrames(object):
    def __init__(self, connection):
        self.connection = connection
        self.stream = io.BytesIO()
        self.count = 0

    def write(self, buf):
        if buf.startswith(b'\xff\xd8'):
            # Start of new frame; send the old one's length
            # then the data
            size = self.stream.tell()
            if size > 0:
                self.connection.write(struct.pack('<L', size))
                self.connection.flush()
                self.stream.seek(0)
                self.connection.write(self.stream.read(size))
                self.count += 1
                self.stream.seek(0)
        self.stream.write(buf)

server_socket = socket.socket()
server_socket.bind(('0.0.0.0', 8001))
server_socket.listen(0)

# Accept a single connection and make a file-like object out of it
connection = server_socket.accept()[0].makefile('wb')
try:
    output = SplitFrames(connection)
    time.sleep(2)
    cap = cv2.VideoCapture(0)
    while True:
        ret, img = cap.read()
        ret, jpg = cv2.imencode('.jpg', img)
        output.write(jpg.tostring())
    connection.write(struct.pack('<L', 0))
finally:
    connection.close()
    server_socket.close()
That works well. I also have a Python client script on my PC, which works well too; I receive a good stream:
import io
import socket
import struct
from PIL import Image
import cv2
import numpy as np

client_socket = socket.socket()
client_socket.connect(("10.12.34.2", 8001))
connection = client_socket.makefile('rb')
try:
    while True:
        # Read the length of the image as a 32-bit unsigned int. If the
        # length is zero, quit the loop
        a = connection.read(struct.calcsize('<L'))
        image_len = struct.unpack('<L', a)[0]
        if not image_len:
            break
        # Construct a stream to hold the image data and read the image
        # data from the connection
        image_stream = io.BytesIO()
        image_stream.write(connection.read(image_len))
        # Rewind the stream, open it as an image with PIL and do some
        # processing on it
        image_stream.seek(0)
        image = Image.open(image_stream)
        cv_image = np.array(image)
        cv2.imshow('Stream', cv_image)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    connection.close()
    client_socket.close()
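For clarity, the wire format both scripts share: each frame is a 4-byte little-endian unsigned length ('<L') followed by the JPEG bytes, and a zero length terminates the stream. A tiny round-trip check of that header:

import struct

# '<L' packs a 4-byte little-endian unsigned int, the frame header used above.
header = struct.pack('<L', 12345)
assert len(header) == 4
assert struct.unpack('<L', header)[0] == 12345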
But I needed to write the client side in WPF, so I did, and it doesn't work well: I receive a distorted stream, and after a few seconds the WPF app breaks with a System.IO.FileFormatException. (Screenshot of the distorted WPF output not included here.)
What could be the reason for this? Thanks for any advice!
Here is the minimum reproducible code:
C#
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Windows;
using System.Windows.Media.Imaging;

namespace WPFTest
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        private static bool connected = false;
        private Socket sct;
        private IPEndPoint ipPoint;

        // TCP
        private bool stopThread = false;
        private Thread thread;

        public MainWindow()
        {
            InitializeComponent();
        }

        private void ConnectButton_Click(object sender, RoutedEventArgs e)
        {
            if (!connected)
            {
                try
                {
                    //this.ipPoint = new IPEndPoint(IPAddress.Parse("127.0.0.1"), int.Parse("8001", CultureInfo.InvariantCulture));
                    this.ipPoint = new IPEndPoint(IPAddress.Parse("10.12.34.2"), int.Parse("8001", CultureInfo.InvariantCulture));
                }
                catch (Exception error)
                {
                    return;
                }
                this.sct = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.IP);
                stopThread = false;
                thread = new Thread(() => { Listening(); });
                thread.IsBackground = false;
                thread.Start();
                connected = true;
            }
        }

        private void DisconnectButton_Click(object sender, RoutedEventArgs e)
        {
            if (connected)
            {
                StopListening();
                connected = false;
            }
        }

        private void Listening()
        {
            try
            {
                sct.Connect(ipPoint);
            }
            catch (Exception e)
            {
                StopListening();
                connected = false;
                return;
            }
            Thread.Sleep(1000);
            try
            {
                while (!this.stopThread)
                {
                    byte[] data = GetInputBytes(sct);
                    Application.Current.Dispatcher.Invoke(() =>
                    {
                        SetImage(data);
                    });
                }
                sct.Shutdown(SocketShutdown.Both);
                sct.Close();
            }
            catch (Exception e)
            {
                StopListening();
            }
        }

        public void StopListening()
        {
            this.stopThread = true;
            try
            {
                thread.Abort();
                sct.Shutdown(SocketShutdown.Both);
                sct.Close();
            }
            catch (Exception e)
            {
                //
            }
        }

        private void SetImage(byte[] array)
        {
            var image = new BitmapImage();
            using (var mem = new MemoryStream(array))
            {
                mem.Position = 0;
                image.BeginInit();
                image.CreateOptions = BitmapCreateOptions.PreservePixelFormat;
                image.CacheOption = BitmapCacheOption.OnLoad;
                image.UriSource = null;
                image.StreamSource = mem;
                image.EndInit();
            }
            image.Freeze();
            ImageCamera.Source = image;
        }

        public static byte[] GetInputBytes(Socket clientSocket)
        {
            byte[] rcvLenBytes = new byte[4];
            clientSocket.Receive(rcvLenBytes);
            UInt32 rcvLen = BytesToInt(rcvLenBytes);
            byte[] rcvBytes;
            byte[] clientData;
            List<byte> rcvBytesList = new List<byte>();
            int totalBytes = 0;
            while (totalBytes < rcvLen)
            {
                if (rcvLen - totalBytes < 262144)
                {
                    clientData = new byte[rcvLen - totalBytes];
                }
                else
                {
                    clientData = new byte[262144];
                }
                int bytesReceived = clientSocket.Receive(clientData);
                rcvBytesList.AddRange(clientData);
                totalBytes += bytesReceived;
            }
            rcvBytes = rcvBytesList.ToArray();
            return rcvBytes;
        }

        public static UInt32 BytesToInt(byte[] arr)
        {
            UInt32 wd = ((UInt32)arr[3] << 24) | ((UInt32)arr[2] << 16) | ((UInt32)arr[1] << 8) | (UInt32)arr[0];
            return wd;
        }
    }
}
XAML
<Window x:Class="WPFTest.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
        xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
        xmlns:local="clr-namespace:WPFTest"
        mc:Ignorable="d"
        Title="MainWindow" Height="450" Width="800">
    <Grid>
        <Grid.RowDefinitions>
            <RowDefinition Height="*"></RowDefinition>
            <RowDefinition Height="*"></RowDefinition>
            <RowDefinition Height="200px"></RowDefinition>
        </Grid.RowDefinitions>
        <Grid.ColumnDefinitions>
            <ColumnDefinition Width="*"></ColumnDefinition>
            <ColumnDefinition Width="220px"></ColumnDefinition>
            <ColumnDefinition Width="*"></ColumnDefinition>
            <ColumnDefinition Width="180px"></ColumnDefinition>
        </Grid.ColumnDefinitions>
        <Button Grid.Column="2" Grid.Row="3" Height="60" Width="140" Content="Connect" x:Name="ConnectButton" FontSize="20" FontFamily="LilyUPC" FontWeight="Bold" HorizontalAlignment="Left" VerticalAlignment="Top" Margin="20, 80, 20, 0" Click="ConnectButton_Click"/>
        <Button Grid.Column="0" Grid.Row="3" Height="60" Width="140" Content="Disconnect" x:Name="DisconnectButton" FontSize="20" FontFamily="LilyUPC" FontWeight="Bold" HorizontalAlignment="Right" VerticalAlignment="Top" Margin="20, 80, 20, 0" Click="DisconnectButton_Click"/>
        <Image Grid.Column="0" Grid.Row="0" Grid.RowSpan="2" Grid.ColumnSpan="3" Margin="10" RenderOptions.BitmapScalingMode="NearestNeighbor" RenderOptions.EdgeMode="Aliased" x:Name="ImageCamera"></Image>
    </Grid>
</Window>
Thanks to @ThomasWeller for their answer in the comments! As they said:
AddRange adds the whole buffer. How should it know how many bytes were received?
In other words: Socket.Receive may fill only part of the buffer, but AddRange appended the entire 256 KiB buffer regardless, padding each frame with stale bytes. So I rewrote my receive method like this:
public static byte[] GetInputBytes(Socket clientSocket)
{
    byte[] rcvLenBytes = new byte[4];
    clientSocket.Receive(rcvLenBytes);
    UInt32 rcvLen = BytesToInt(rcvLenBytes);
    byte[] rcvBytes;
    byte[] clientData;
    List<byte> rcvBytesList = new List<byte>();
    int totalBytes = 0;
    while (totalBytes < rcvLen)
    {
        if (rcvLen - totalBytes < 262144)
        {
            clientData = new byte[rcvLen - totalBytes];
        }
        else
        {
            clientData = new byte[262144];
        }
        int bytesReceived = clientSocket.Receive(clientData);
        rcvBytesList.AddRange(clientData.Take(bytesReceived).ToArray());
        totalBytes += bytesReceived;
    }
    rcvBytes = rcvBytesList.ToArray();
    return rcvBytes;
}
The changed line is: rcvBytesList.AddRange(clientData.Take(bytesReceived).ToArray());
It works fine now.
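The same short-read pitfall exists with Python sockets: socket.recv(n) may return fewer than n bytes. A minimal helper in the same spirit (a sketch, not from the original post):

def recv_exact(sock, n):
    """Read exactly n bytes from a TCP socket, looping over short reads."""
    buf = bytearray()
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise ConnectionError('socket closed mid-frame')
        buf.extend(chunk)
    return bytes(buf)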

Transfer data from python to nodejs using requests and socket

I'm trying to send data from a simple Python program to a Node server, with no success, so I'm asking for help.
My simple Python:
import requests

SIGNUP_URL = 'http://localhost:8000/timer'

def submit_form():
    obj = {name: 'whateever'}
    resp = requests.post(SIGNUP_URL, data=obj)

if __name__ == '__main__':
    submit_form()
My Node.js (trimmed; lines not relevant here removed):
var http = require('http');
var express = require('express');
var app = express();

app.get('/', function (request, response) {
    response.sendFile(__dirname + '/public/index.html');
});

var server = http.createServer(app);
var io = require('socket.io')(server);
const bodyParser = require('body-parser');
const path = require('path');
const { execFile, exec, spawn } = require('child_process');

app.use(express.static('public'));
app.use(bodyParser.urlencoded({ extended: true }));

app.post('/timer', function (req, res) {
    res.sendFile(__dirname + '/public/status.html');
    var test = "test";
    var data = req.body;
    var info = data.name;
    io.emit('messageFromServer', { info });
    console.log('info')
});

server.listen(8000, console.log("listening to port 8000"));
So, when I execute the Python script I want it to transfer the data "name: whateever" to the server; then I want the server to write the data to the console (to be sure the data was sent correctly) and, if all is OK, emit it to my HTML page.
Thanks for helping me.
The answer: the dictionary key has to be quoted (a bare name is an undefined Python variable and raises a NameError), and the server should log the variable info rather than the string literal 'info'.
Python code:
import requests

SIGNUP_URL = 'http://localhost:8000/timer'

def submit_form():
    obj = {'name': 'whateever'}
    resp = requests.post(SIGNUP_URL, data=obj)

if __name__ == '__main__':
    submit_form()
Node.js code:
app.post('/timer', function (req, res) {
    res.sendFile(__dirname + '/public/status.html');
    var info = req.body;
    io.emit('messageFromServer', info);
    console.log(info)
});
It works.
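One design note: requests' data= parameter sends the fields form-encoded, which is exactly what bodyParser.urlencoded({ extended: true }) parses on the Node side. A hypothetical JSON variant (it would require app.use(express.json()) on the server) looks like this:

import requests

# Hypothetical variant: json= sends an application/json body, which the
# Node server would need app.use(express.json()) to parse into req.body.
requests.post('http://localhost:8000/timer', json={'name': 'whateever'})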

How to automatically start and stop a python script in nodejs?

I am developing a temperature monitoring application for a hen house, with a web interface. I use two Arduinos and a Raspberry Pi.
Arduino 1: a temperature/humidity sensor and an RF 433 MHz transmitter are connected to it.
Arduino 2: an RF 433 MHz receiver is connected to it; it receives data from Arduino 1.
Raspberry Pi: Arduino 2 is connected to the Raspberry Pi, which reads the received data from the serial port and sends it to the web page via WebSockets (the ws package for Node.js).
At first I wanted to read this data directly with Node.js, but I had some problems installing the serial port package.
So I changed my approach: I read the serial data with Python, write it to files, and Node.js reads these files and sends the data to the web page.
Here are the two codes I use:
Python script
import serial
import time

ser = serial.Serial('/dev/ttyACM0', 9600)

while True:
    data = ser.readline()
    if data:
        t = data[0:2]
        h = data[6:8]
        # decode utf-8
        tc = t.decode("utf-8")
        hc = h.decode("utf-8")
        # write the temperature to the temp file
        fileTc = open('temp', 'w')
        fileTc.write(str(tc))
        fileTc.close()
        # write the humidity to the hum file
        fileHc = open('hum', 'w')
        fileHc.write(str(hc))
        fileHc.close()
    # sleep
    time.sleep(.1)
Node.js script
var express = require("express");
const WebSocket = require('ws');
const wss = new WebSocket.Server({ port: 4400 });
var path = require("path");
var fs = require("fs");
var sys = require("util");
var exec = require("child_process").exec;

var tempcpu = 0;
var temp = 0;
var hum = 0;

var app = express();
app.set("port", process.env.PORT || 5500);
app.set("views", path.join(__dirname, "views"));
app.set("view engine", "ejs");
app.use('/', express.static('public'));

wss.on('connection', function connection(ws) {
    ws.on('message', function incoming(message) {
        console.log('received: %s', message);
    });

    setInterval(function () {
        child1 = exec("cat /sys/class/thermal/thermal_zone0/temp",
            function (error, stdout, stderr) {
                if (error !== null) {
                    console.log('exec error: ' + error);
                } else {
                    tempcpu = parseFloat(stdout) / 1000;
                }
            });
        child2 = exec("cat temp", function (error, stdout, stderr) {
            if (error !== null) {
                console.log('exec error: ' + error);
            } else {
                temp = parseFloat(stdout);
            }
        });
        child3 = exec("cat hum", function (error, stdout, stderr) {
            if (error !== null) {
                console.log('exec error: ' + error);
            } else {
                hum = parseFloat(stdout);
            }
        });

        var tempCPU = JSON.stringify(["cpu", tempcpu]);
        var temperature = JSON.stringify(["temp", temp]);
        var humidity = JSON.stringify(["hum", hum]);
        ws.send(tempCPU);
        ws.send(temperature);
        ws.send(humidity);
    }, 5000);
});

app.get("/", function (request, response) {
    response.render("dashboard");
});

app.listen(app.get("port"), function () {
    console.log("Server started at port " + app.get("port"));
});
For now I have to launch both scripts separately. I would like to run the Python script directly from Node.js when I start the Node server, and stop it when I stop my Node.js code (CTRL+C).
Do you have an idea of how to do it?
What you want is to spawn a new process in which you execute the other app, from either the Node app or the Python app:
NodeJS approach: Child process
const { spawn } = require('child_process');
const pythonApp = spawn('python', ['my_python_app.py']);
Python approach: Subprocess
import subprocess

node_app = subprocess.Popen(
    ["node", "my_node_app.js"],
    stdin=subprocess.PIPE, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
EDIT
Regarding catching the interrupt (CTRL+C) signal: this can also be done in both languages, and leveraged to kill the process you spawned.
With NodeJS:
process.on('SIGINT', () => {
    console.log("Caught interrupt signal");
    if (pythonApp) pythonApp.kill();
    process.exit(); // exit explicitly, since handling SIGINT cancels the default exit
});
With Python:
import sys

try:
    ...  # Your app here
except KeyboardInterrupt:
    print("Caught interrupt signal")
    if node_app:
        node_app.kill()
    sys.exit()
