Problem with Time Synchronisation in 5G and 2.4G Packet Lag Measurement - Python

I have to compare the lag in a server-client model on 2.4G and 5G.
My expectation is that 5G is faster than 2.4G by a large margin. I have already taken the 5G measurements; the average lag turned out to be 40.2 ms, which is higher than I was predicting. The issue became visible when I tried the same with the 2.4G setup: the lag was calculated to be a negative value. The two computers on which I was running the code weren't synchronised. I would appreciate any input on how to solve this issue.
I wrote the code in a Jupyter Notebook.
Below are PowerPoint diagrams of the setups used and the respective code for the client and the server. The results displayed are in microseconds.
5G Setup
2.4G Setup
Server Code:
#!/usr/bin/env python
# coding: utf-8

# In[1]:
from flask import Flask
from flask import request
from datetime import datetime
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import csv
import time
from time import sleep
from decimal import Decimal

# In[2]:
test = 1

# In[3]:
# create a .csv file to append data to
file_name = "2.4G_Time_Data_" + str(test)
test = test + 1
print(file_name)
with open(file_name + '.csv', 'w', newline='') as time_file:
    spamwriter = csv.writer(time_file, delimiter=',',
                            quotechar='|', quoting=csv.QUOTE_MINIMAL)
    spamwriter.writerow(['Packet', 'Lag(uS)'])

# In[ ]:
# start running a server; saves the incoming data in a csv file
received_package = 0
app = Flask(__name__)

@app.route('/postjson', methods=['POST'])
def postJsonHandler():
    global received_package
    received_package = received_package + 1
    print(request.is_json)
    content = request.get_json()
    print(content)
    now = datetime.now()
    time = content["time"]  # sender's timestamp (note: shadows the time module)
    time_now = datetime.timestamp(now)
    print("Sent     : " + str(time))
    print("Received : " + str(time_now))
    delta_time = (time_now - time) * (10**6)  # in microseconds
    print("Packet Travel Time (us) : " + str(delta_time))
    with open(file_name + '.csv', 'a') as f:
        writer = csv.writer(f)
        writer.writerow([str(received_package), str(delta_time)])
    return 'JSON Received'

app.run(host='0.0.0.0', port=8090)
Client Code:
from datetime import datetime
import requests
import signal
from time import sleep
import time
import os
import sys
import json

sample_size = 1000
for i in range(sample_size):
    now = datetime.now()
    time = now.strftime("%H:%M:%S") + ":" + str(now.microsecond)
    #time = str(now)
    timestamp = datetime.timestamp(now)
    requests.post('http://myIP:8090/postjson', json={'time': timestamp})
    # note: sys.getsizeof(json) measures the json module object, not the payload
    print("Estimated size: " + str(sys.getsizeof(json) / 1024) + "KB")
    sleep(0.1)
My 2.4G Measurement
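One way to sidestep the clock-synchronisation problem entirely is to measure the round-trip time on the client alone, so that only one clock is involved, and take half of it as the one-way lag estimate. A minimal sketch against the /postjson endpoint above (assumptions: the path is roughly symmetric, and the RTT also includes the server's CSV-writing and printing time):

import time
import requests

samples = []
for i in range(100):
    start = time.perf_counter()  # one local clock only
    requests.post('http://myIP:8090/postjson', json={'time': start})
    rtt = time.perf_counter() - start  # full round trip, in seconds
    samples.append(rtt / 2 * 1e6)  # one-way estimate, in microseconds

print("mean one-way lag (us):", sum(samples) / len(samples))

This removes the possibility of negative values, since the start and the response arrival are read from the same clock.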

Related

How to create a new array by shifting pyserial readline() output rows?

I am a newbie in machine automation. I am working on collecting data from 2 micrometers using Python; I am collecting data from 2 individual COM ports. I run the following Python script to record the data and display it in the Python shell.
import time
import datetime
import serial
import matplotlib.pyplot as plt
import pandas as pd

# configure the serial connections
ser1 = serial.Serial(
    port='COM4', baudrate=115200,
    bytesize=8,
    stopbits=1, timeout=0.02)
print("Connected to: " + ser1.port)
ser2 = serial.Serial(
    port='COM5', baudrate=115200,
    bytesize=8,
    stopbits=1, timeout=0.02)
print("Connected to: " + ser2.port)
ser1.isOpen()
ser2.isOpen()

# Read data from the serial port, which will run in an infinite loop.
print("Instrument Type: Micrometer")
file_name = str(input('\nInsert Filename: '))
run_time = float(input('\nInsert Runtime: '))
start = time.time()
with open('./' + file_name + '.csv', 'w+') as output_file:
    output_file.write('Timestamp,Data1,Data2\r')
    while time.time() - start < run_time:
        timestamp = str.format('{0:.3f}', (time.time() - start))
        data1 = ser1.readline().decode().rstrip()
        data2 = ser2.readline().decode().rstrip()
        output_file.write(timestamp + ',' + data1 + ',' + data2 + '\r')
        time.sleep(0.01 - (time.time() - start) % 0.01)
        print(timestamp, data1, data2)
print('Data Recorded')
It gives me the following output that I want (don't worry about the values).
Now, I want to apply a shift()-like function to the data1 (i.e., ser1) values to create a new column alongside, which will start storing data1 values from the 4th row onwards, with the first 3 rows null/NaN. Needless to say, it needs to happen in real time as a continuous output.
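One way to get that lagged column in real time is to keep a short buffer of the most recent data1 readings and emit the value from 3 samples back, writing a NaN field until the buffer fills. A minimal sketch of the recording loop, assuming the same ser1, ser2, output_file, start and run_time as in the script above:

from collections import deque

lag = 3
buffer = deque(maxlen=lag)  # holds the last 3 data1 readings

while time.time() - start < run_time:
    timestamp = format(time.time() - start, '.3f')
    data1 = ser1.readline().decode().rstrip()
    data2 = ser2.readline().decode().rstrip()
    # the value from 3 rows back, or 'nan' while the buffer is still filling
    data1_shifted = buffer[0] if len(buffer) == lag else 'nan'
    buffer.append(data1)
    output_file.write(timestamp + ',' + data1 + ',' + data2 + ',' + data1_shifted + '\r')
    print(timestamp, data1, data2, data1_shifted)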

I can't seem to get my Python code to run on my Raspberry Pi Zero on bootup via cronjob

So I added this line to my crontab at the end of the file, but I cannot seem to get my program to run. Nothing is being logged in my scubot.log, so that's not very helpful. I'm not sure what I'm doing wrong; I've tried several solutions to this but can't seem to find any good information, at least nothing that fits my case.
@reboot sleep 15; /usr/bin/python3 /home/pi/Documents/scubot/scubot.py >> /home/pi/Documents/scubot/scubot.log
from sqlite3 import Date, Error
import ssl
import requests
import configparser
from datetime import date, datetime
import schedule
import time
import emailHelper
from email_templates import emails
# import RPi.GPIO as GPIO
# import time

config = configparser.ConfigParser()
config.read(".config")
configstuff = config['Weather_API']['weatherAPIKey']
lat = "41.6884"
long = "-93.7925"

import pymongo
# Replace the uri string with your MongoDB deployment's connection string.
conn_str = "mongodb+srv://sensitive.nqx9x.mongodb.net/?retryWrites=true&w=majority"
# set a 5-second connection timeout
client = pymongo.MongoClient(conn_str, serverSelectionTimeoutMS=5000)
db = client.scubot
weatherDataCollection = db.weatherdata

# create env to only run this when on the raspberry pi?
import os
import time
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')
device_file = '/sys/devices/w1_bus_master1/28-0620111704e4/w1_slave'
print('Scubot Started')

def read_temp_raw():
    f = open(device_file, 'r')
    lines = f.readlines()
    f.close()
    print('lines', lines)
    return lines

def read_temp():
    lines = read_temp_raw()
    while lines[0].strip()[-3:] != 'YES':
        time.sleep(0.2)
        lines = read_temp_raw()
    equals_pos = lines[1].find('t=')
    if equals_pos != -1:
        temp_string = lines[1][equals_pos + 2:]
        print('temp string', temp_string)
        temp_c = float(temp_string) / 1000.0
        temp_f = temp_c * 9.0 / 5.0 + 32.0
        return temp_f

response = requests.get("https://api.openweathermap.org/data/2.5/weather?lat=" + lat + "&lon=" + long + "&units=imperial&appid=" + configstuff + "").json()
weather = response['main']
city = response['name']
icon = response['weather'][0]['icon']
number1 = 498474239847239
boolean = True
# print(city)
# print(weather)

def storeWeatherData():
    try:
        weatherData = {
            "city": city,
            "outsideTemp": weather['temp'],
            "humidity": weather['humidity'],
            "outsidePressure": weather['pressure'],
            "lat": lat,
            "long": long,
            "shallowProbeTemp": "{:.2f}".format(read_temp()),
            "dateOfTempReading": datetime.now()
        }
        weatherDataCollection.insert_one(weatherData)
        print(weatherData, 'Successfully inserted into the database')
    except Exception as e:  # bind the exception so the actual error gets printed
        print(e)
    # emailHelper.sendEmail(['test'], 'testing scuba app', emails.temperatureEmail(weather['temp'], lat, long))

# This uses a scheduler to run a certain function every day. We can set this to minutes or hours or days.
schedule.every(1).minutes.do(storeWeatherData)
while True:
    schedule.run_pending()
    time.sleep(1)
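One detail worth checking: cron starts the job from the user's home directory, so the relative path in config.read(".config") will silently resolve to nothing and the script can crash before logging a single line. A minimal sketch of one fix, resolving the path against the script's own location (this is an assumption about the failure, not something confirmed by the log):

import os
import configparser

# resolve the config path relative to this script, not cron's working directory
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
config = configparser.ConfigParser()
config.read(os.path.join(SCRIPT_DIR, ".config"))

Appending 2>&1 to the crontab line would also send the Python traceback into scubot.log instead of discarding it.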

Azure Storage Python SDK: Uploading a file to Azure blob storage without writing it to my disk

I have a lot of images on my Apache server that I want to move to Azure.
I cannot afford to do it sequentially, so I will add threading afterwards. I can access those images from a given URL and build a list from that. Easy.
Now, I do not have enough disk space to download each image, upload it, and then delete it. I would like something cleaner.
Is there a method to do that?
Something like:
block_blob_service.AZURECOMMAND(container, source_URL, target_blob_name)
If that is not possible, is there a workaround?
Here is the complete code I have today (download then upload, which I want to avoid):
EDIT: Thanks to Gaurav Mantri, I got it now. I have updated the code.
import requests
from bs4 import BeautifulSoup
from os.path import basename
import os
import sys
import urllib
import urllib2
import urlparse
import argparse
import json
import config
import random
import base64
import datetime
import time
import string
from azure.storage import CloudStorageAccount, AccessPolicy
from azure.storage.blob import BlockBlobService, PageBlobService, AppendBlobService
from azure.storage.models import CorsRule, Logging, Metrics, RetentionPolicy, ResourceTypes, AccountPermissions
from azure.storage.blob.models import BlobBlock, ContainerPermissions, ContentSettings
#from azure.storage.blob import BlobService
from azure.storage import *
#from azure.storage.blob.blobservice import BlobService

CURRENT_DIR = os.getcwd()
STORING_DIRECTORY_NAME = "stroage_scrapped_images"
STORING_DIRECTORY = CURRENT_DIR + "/" + STORING_DIRECTORY_NAME
if not os.path.exists(STORING_DIRECTORY):
    os.makedirs(STORING_DIRECTORY)

def randomword(length):
    letters = string.ascii_lowercase
    return ''.join(random.choice(letters) for i in range(length))

startdate = time.clock()
metadata_loaded = {'Owner': 'ToBeAddedSoon', 'Date_Of_Upload': startdate, 'VAR_2': 'VAL_VAR_2', 'VAR_3': 'VAL_VAR_3', 'VAR_4': 'VAL_VAR_4'}

with open("credentials.json", 'r') as f:
    data = json.loads(f.read())
    StoAcc_var_name = data["storagacc"]["Accountname"]
    StoAcc_var_key = data["storagacc"]["AccountKey"]
    StoAcc_var_container = data["storagacc"]["Container"]
    #print StoAcc_var_name, StoAcc_var_key, StoAcc_var_container

def copy_azure_files(source_url, destination_object, destination_container):
    blob_service = BlockBlobService(account_name=StoAcc_var_name, account_key=StoAcc_var_key)
    blob_service.copy_blob(destination_container, destination_object, source_url)

block_blob_service = BlockBlobService(account_name=StoAcc_var_name, account_key=StoAcc_var_key)

def upload_func(container, blobname, filename):
    start = time.clock()
    block_blob_service.create_blob_from_path(
        container,
        blobname,
        filename)
    elapsed = time.clock()
    elapsed = elapsed - start
    print "*** DEBUG *** Time spent uploading API ", filename, " is : ", elapsed, " in Bucket/container : ", container

#URL_TARGET = "https://mouradcloud.westeurope.cloudapp.azure.com/blog/blog/category/food/"
URL_TARGET = "https://www.cdiscount.com/search/10/telephone.html"
base_url = URL_TARGET
out_folder = '/tmp'
r = requests.get(URL_TARGET)
data = r.text
soup = BeautifulSoup(data, "lxml")

for link in soup.find_all('img'):
    src = link
    image_url = link.get("src")
    while image_url is not None:
        if 'http' in image_url:
            blocks = []
            if image_url.endswith(('.png', '.jpg', '.jpeg')):
                print " ->>>>>>>>>>>>>> THIS IS AN IMAGE ... PROCESSING "
                file_name_downloaded = basename(image_url)
                file_name_path_local = STORING_DIRECTORY + "/" + file_name_downloaded
                with open(file_name_path_local, "wb") as f:
                    f.write(requests.get(image_url).content)
                filename_in_clouddir = "uploads" + "/" + file_name_downloaded
                #upload_func(StoAcc_var_container, filename_in_clouddir, file_name_path_local)
                copy_azure_files(image_url, filename_in_clouddir, StoAcc_var_container)
                break
            else:
                print " ->>>>>>>>>>>>>> THIS NOT AN IMAGE ... SKIPPING "
                break
        else:
            print " ->>>>>>>>>>>>>> THIS IS A LOCAL IMAGE ... SKIPPING "
            break
    continue
Indeed there's something exactly like this: copy_blob
block_blob_service.copy_blob(container, target_blob_name, source_URL)
Please keep in mind that this copy operation is asynchronous server-side copying, thus:
- The source of the copy must be publicly accessible.
- You must wait for the copy operation to finish before deleting source items.
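Because the copy is asynchronous, a caller that wants to delete sources afterwards has to poll until it completes. A minimal sketch, assuming the same legacy azure-storage SDK as in the code above (the properties.copy.status path is from that SDK's Blob model; treat it as an assumption):

import time

def wait_for_copy(blob_service, container, blob_name, poll_seconds=1):
    # poll the destination blob until the server-side copy leaves 'pending'
    while True:
        blob = blob_service.get_blob_properties(container, blob_name)
        status = blob.properties.copy.status
        if status != 'pending':
            return status  # 'success', 'aborted' or 'failed'
        time.sleep(poll_seconds)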
UPDATE
Modified code (I have not tried running it)
import requests
from bs4 import BeautifulSoup
from os.path import basename
import os
import sys
import urllib
import urllib2
import urlparse
import argparse
import json
import config
import random
import base64
import datetime
import time
import string
from azure.storage import CloudStorageAccount, AccessPolicy
from azure.storage.blob import BlockBlobService, PageBlobService, AppendBlobService
from azure.storage.models import CorsRule, Logging, Metrics, RetentionPolicy, ResourceTypes, AccountPermissions
from azure.storage.blob.models import BlobBlock, ContainerPermissions, ContentSettings

CURRENT_DIR = os.getcwd()
STORING_DIRECTORY_NAME = "stroage_scrapped_images"
STORING_DIRECTORY = CURRENT_DIR + "/" + STORING_DIRECTORY_NAME
if not os.path.exists(STORING_DIRECTORY):
    os.makedirs(STORING_DIRECTORY)

def randomword(length):
    letters = string.ascii_lowercase
    return ''.join(random.choice(letters) for i in range(length))

startdate = time.clock()
metadata_loaded = {'Owner': 'ToBeAddedSoon', 'Date_Of_Upload': startdate, 'VAR_2': 'VAL_VAR_2', 'VAR_3': 'VAL_VAR_3', 'VAR_4': 'VAL_VAR_4'}

with open("credentials.json", 'r') as f:
    data = json.loads(f.read())
    StoAcc_var_name = data["storagacc"]["Accountname"]
    StoAcc_var_key = data["storagacc"]["AccountKey"]
    StoAcc_var_container = data["storagacc"]["Container"]
    #print StoAcc_var_name, StoAcc_var_key, StoAcc_var_container

block_blob_service = BlockBlobService(account_name=StoAcc_var_name, account_key=StoAcc_var_key)

def upload_func(container, blobname, sourceurl):
    start = time.clock()
    block_blob_service.copy_blob(
        container,
        blobname,
        sourceurl)
    elapsed = time.clock()
    elapsed = elapsed - start
    print "*** DEBUG *** Time spent uploading API ", blobname, " is : ", elapsed, " in Bucket/container : ", container

#URL_TARGET = "https://mouradcloud.westeurope.cloudapp.azure.com/blog/blog/category/food/"
URL_TARGET = "https://www.cdiscount.com/search/10/telephone.html"
base_url = URL_TARGET
out_folder = '/tmp'
r = requests.get(URL_TARGET)
data = r.text
soup = BeautifulSoup(data, "lxml")

for link in soup.find_all('img'):
    src = link
    image_url = link.get("src")
    while image_url is not None:
        if 'http' in image_url:
            blocks = []
            if image_url.endswith(('.png', '.jpg', '.jpeg')):
                print " ->>>>>>>>>>>>>> THIS IS AN IMAGE ... PROCESSING "
                file_name_downloaded = basename(image_url)
                filename_in_clouddir = "uploads" + "/" + file_name_downloaded
                upload_func(StoAcc_var_container, filename_in_clouddir, image_url)
                break
            else:
                print " ->>>>>>>>>>>>>> THIS NOT AN IMAGE ... SKIPPING "
                break
        else:
            print " ->>>>>>>>>>>>>> THIS IS A LOCAL IMAGE ... SKIPPING "
            break
    continue
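Since the plan is to add threading afterwards, here is one hedged sketch of how the copy calls could be parallelised with a thread pool. Note that concurrent.futures is Python 3, while the code above is Python 2, so treat this as a sketch of the eventual port rather than a drop-in addition:

from concurrent.futures import ThreadPoolExecutor
from os.path import basename

def copy_all(image_urls, container):
    # copy_blob is a single short HTTP call, so threads overlap the network waits
    with ThreadPoolExecutor(max_workers=8) as pool:
        for url in image_urls:
            blob_name = "uploads/" + basename(url)
            pool.submit(block_blob_service.copy_blob, container, blob_name, url)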

How to add multithreading or multiprocessing

I am running the script below and it takes almost 35 seconds for all stocks. Is there any library to run it faster, for all stocks at a time?
import schedule
import time
from kiteconnect import KiteConnect
import csv
import json
import requests
import pandas_datareader.data as pdr
import pandas as pd
import matplotlib.pyplot as plt
import time
import subprocess

def job():
    api_key = 'YOUR_API'
    api_secret = 'YOUR_SECRETKEY'
    api_token = 'YOUR_ACESSTOKEN'
    kite = KiteConnect(api_key=api_key)
    kite.set_access_token('YOUR_ACCESStoken')
    Stocks = ['BANKINDIA', 'CAPF', 'CHENNPETRO', 'DLF',
              'EQUITAS', 'ESCORTS', 'FORTIS', 'HEXAWARE',
              'IDBI', 'IDFCBANK', 'IOC', 'IRB', 'ITC', 'JUBLFOOD',
              'KPIT', 'OFSS', 'ONGC', 'PFC', 'PNB',
              'RPOWER', 'TATAPOWER', 'VGUARD', 'WOCKPHARMA']
    for testst in Stocks:
        print(testst)
        Kite_TODAY = "https://api.kite.trade/instruments/NSE/%s?api_key='YOUR_API'&access_token='ACCESS_TOKEN'"
        print(Kite_TODAY % testst)
        r = requests.get(Kite_TODAY % testst)
        rjson = r.json()
        r1 = rjson['data']['last_price']
        Open = rjson['data']['ohlc']['open']
        High = rjson['data']['ohlc']['high']
        Low = rjson['data']['ohlc']['low']
        Close = rjson['data']['ohlc']['close']
        print(" Stock %s Open %s High %s Low %s Close %s" % (testst, Open, High, Low, Close))
        if Open == High:
            testkite = ("kite.order_place(tradingsymbol='%s',exchange='NSE',quantity=1,price=%s,squareoff_value=1,stoploss_value=5,variety='bo',transaction_type='SELL',order_type='LIMIT',product='MIS',validity='DAY')") % (testst, Open)
            order1 = testkite
            order2 = exec(order1)  # builds the order call as a string and executes it
            print(order2)
    print(" working...")
    return

schedule.every().day.at("09:15").do(job)
while True:
    schedule.run_pending()
    time.sleep(1)
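Most of those 35 seconds are spent waiting on the network, one request at a time, so threads help even without multiprocessing. A minimal sketch with concurrent.futures, assuming the same Kite_TODAY URL template and Stocks list as in the script above (moved to module scope):

from concurrent.futures import ThreadPoolExecutor
import requests

def fetch_quote(symbol):
    # each worker blocks on its own HTTP request, so the waits overlap
    r = requests.get(Kite_TODAY % symbol)
    return symbol, r.json()['data']['ohlc']

with ThreadPoolExecutor(max_workers=10) as pool:
    for symbol, ohlc in pool.map(fetch_quote, Stocks):
        print(symbol, ohlc['open'], ohlc['high'], ohlc['low'], ohlc['close'])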

Serial data capture and real-time plotting: drawnow, matplotlib, Python

I am new to programming, and after a few weeks I have made some programs to do simple things, like capture serial data from an Arduino and save it to a text file.
Now I want to combine a couple of things. I want to use Python to capture serial data, prompt for the port and filename, plot that data in real time, and then save and close the file when the Arduino is no longer connected. Here is the code I have so far.
The problem is that the graph is not real time at all; the sensors show almost no change. I also sometimes get a matplotlib deprecation warning. I'm wondering if there is a quick fix or if I am missing something crucial. Thank you so much!
import numpy
import matplotlib.pyplot as plt
import math
import time
import pylab
from drawnow import drawnow
import csv
import serial
import os
import glob
import sys

filename = raw_input("Save file as: ")
saveFile = open(filename, 'w')
print "Available ports: "

def serial_port():
    if sys.platform.startswith('win'):
        ports = ['COM%s' % (i + 1) for i in range(256)]
    elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
        ports = glob.glob('/dev/tty[A-Za-z]*')
    elif sys.platform.startswith('darwin'):
        ports = glob.glob('/dev/tty.*')
    else:
        raise EnvironmentError('Unsupported Platform')
    result = []
    for port in ports:
        try:
            s = serial.Serial(port)
            s.close()
            result.append(port)
        except (OSError, serial.SerialException):
            pass
    return result

if __name__ == '__main__':
    print serial_port()

serialport = raw_input("Enter Port: ")
port1 = serialport
print "Connecting to port...", port1
arduino1 = serial.Serial(port1, 115200)
print "Arduino Detected"

# create arrays with the following names
Time = []
analog0 = []
analog1 = []
voltage0 = []
voltage1 = []
Temp = []
RH = []

# reading data from the serial port
# telling matplotlib to plot live data
plt.ion()

# creates a function to make the plot we want
def Fig1():
    plt.plot(analog0, 'r-')
    plt.title("Analog0 Data")
    plt.ylim(405, 425)
    plt.grid(True)
    plt.ylabel("analog")
    plt.xlabel("milliseconds")

x = os.path.exists(port1)
while x == 0:
    arduinoString = arduino1.readline()
    saveFile.write(arduinoString)
    dataArray = arduinoString.split(',')
    time = float(dataArray[0])
    a0 = float(dataArray[1])
    a1 = float(dataArray[2])
    v0 = float(dataArray[3])
    v1 = float(dataArray[4])
    temp = float(dataArray[5])
    rh = float(dataArray[6])
    # filling our arrays with those new data values (floats)
    Time.append(time)
    analog0.append(a0)
    analog1.append(a1)
    voltage0.append(v0)
    voltage1.append(v1)
    Temp.append(temp)
    RH.append(rh)
    drawnow(Fig1)
    plt.pause(0.00010)
else:
    saveFile.close()
I also had the same problem.
It was solved by using set_data(), as in the link below.
Draw now and Matplotlib
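For reference, a minimal sketch of the set_data() approach: create the line artist once and update its data each iteration, instead of replotting inside drawnow for every sample (the synthetic loop and values below are illustrative only, not from the code above):

import matplotlib.pyplot as plt

plt.ion()
fig, ax = plt.subplots()
line, = ax.plot([], [], 'r-')  # create the artist once
ax.set_ylim(405, 425)

xs, ys = [], []
for i in range(200):  # stand-in for the serial read loop
    xs.append(i)
    ys.append(415)  # stand-in for a new sensor sample
    line.set_data(xs, ys)  # update the existing line instead of replotting
    ax.relim()
    ax.autoscale_view(scalex=True, scaley=False)
    plt.pause(0.001)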
