Python script for measuring battery time

I was looking for a script to log battery time (i.e. the total time the laptop runs on battery). I thought I'd give writing one a shot in Python. I'm a beginner at Python and came up with this using a lot of examples from this site :D
#!/usr/bin/env python
import subprocess, os
from datetime import datetime

time = (datetime.now()).strftime('%H:%M:%S')
date = (datetime.today()).strftime('%d/%m/%y')

def start(x):
    if x[2] == 'Discharging' and int(x[3][:-1]) in range(98, 101):
        batt_log = open('/home/saad/Code/batt_log', 'w')
        batt_log.write(time + '%s' % (os.linesep))
        batt_log.close()

def end(x):
    if x[2] == 'Discharging' and int(x[3][:-1]) in range(1, 11):
        batt_log = open('/home/saad/Code/batt_log', 'a')
        batt_log.write(time)
        batt_log.close()

def main():
    output = subprocess.check_output('acpi -b', shell=True)
    l = (output.replace(',', '')).split(' ')
    if not (l[2] in ['Charging', 'Full'] or int(l[3][:-1]) in range(11, 98)):
        start(l)
        end(l)
    ts = []
    batt_log = open('/home/saad/Code/batt_log', 'r')
    all_lines = batt_log.readlines()
    for line in all_lines:
        ts.append(line.replace(os.linesep, ''))
    if len(ts) > 1:
        FMT = '%H:%M:%S'
        tdelta = datetime.strptime(ts[1], FMT) - datetime.strptime(ts[0], FMT)
        batt_store = open('/home/saad/Code/batt_store', 'a')
        batt_store.write(date + '\nTotal Time: ' + str(tdelta) + '\n')
        batt_store.close()
    batt_store = open('/home/saad/Code/batt_store', 'r')
    all_lines = batt_store.readlines()
    print "Last Battery Time:", all_lines[-1][-8:]

if __name__ == '__main__':
    main()
The script actually works, but I'd like it to be better. It uses the system acpi command to get battery stats, writes the start and end times to one file (batt_log), then reads that file back, calculates the time difference, and writes it to another file (batt_store). I run it every 5 minutes as a process.
What I'd like to do is use fewer file I/O operations and find a way to store values persistently in the program. Any ideas welcome.

Getting the data through a command is far easier. In essence, what the acpi command does is open a file descriptor on a specific file node. You could look at D-Bus interfaces for getting the information.
Regarding the opening and closing of files, you could again use services like D-Bus or GConf, but it is just easier to write a file.
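If you want to avoid shelling out to acpi at all, most Linux systems expose the same numbers through sysfs. A minimal sketch, assuming the battery shows up as BAT0 (the node name varies by machine, so check /sys/class/power_supply/ first):

def read_battery():
    base = '/sys/class/power_supply/BAT0'
    # 'status' holds e.g. 'Discharging', 'Charging' or 'Full';
    # 'capacity' holds the charge percentage as an integer 0-100.
    with open(base + '/status') as f:
        status = f.read().strip()
    with open(base + '/capacity') as f:
        capacity = int(f.read().strip())
    return status, capacity

status, capacity = read_battery()
print('%s at %d%%' % (status, capacity))

This would replace both the subprocess call and the string splitting in main().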

Related

Imported function is not working when using it from another module

I have been trying to understand what is wrong with my code, with no success...
I have two .py files I have written with some functions: logs.py is supposed to write input to a file,
and monitor_mode.py uses those functions.
When running logs.py as main, everything works just fine and the file is created and written to. However, when trying to use the same function from monitor_mode.py, nothing seems to be written to the files and I have no idea why.
I did try to debug, and the code is directed to the right function and everything goes as expected, except there is no creation of, or data written to, the file.
Thanks for any help.
logs.py
import datetime

serviceList = 'serviceList.txt'
statusLog = 'statusLog.txt'

def print_to_file(file_name, input):
    with open(file_name, 'a+') as write_obj:
        write_obj.write(input + '\n')

def add_timestamp(input):
    timestamp = '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + datetime.datetime.now().strftime(
        "%Y-%m-%d %H:%M:%S") + '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    input = timestamp + '\n' + input
    return input

if __name__ == "__main__":
    import services
    for i in range(3):
        proc = services.list_of_process()
        proc = add_timestamp(proc)
        print_to_file(serviceList, proc)
monitor_mode.py
import logs
import services

serviceList = 'serviceList.txt'
statusLog = 'statusLog.txt'

def updates_log():
    proc = services.list_of_process()
    proc = logs.add_timestamp(proc)
    logs.print_to_file(serviceList, proc)
    print('Updates Logs\n' + proc)

if __name__ == "__main__":
    for i in range(3):
        updates_log()
EDIT 1.1
The above code is running on Ubuntu 16.8; when running the code on a Win10 machine it works just fine.
services.list_of_process() returns a string.
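One thing worth checking when the same script writes files on Windows but apparently not on Ubuntu is the current working directory: a relative name like 'serviceList.txt' is resolved against wherever the process was started from. A sketch of pinning the paths to the script's own directory (same variable names as above, purely illustrative):

import os

# Resolve the log files next to this script instead of the current working directory.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
serviceList = os.path.join(BASE_DIR, 'serviceList.txt')
statusLog = os.path.join(BASE_DIR, 'statusLog.txt')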

How to process access log using python multiprocessing library?

I have to parse 30 days of access logs from the server based on client IP and accessed hosts, and I need to know the top 10 accessed sites. The log files will be around 10-20 GB in size, which takes a lot of time for single-threaded execution of the script. Initially, I wrote a script which was working fine, but it was taking a lot of time due to the large log file size. Then I tried to use the multiprocessing library for parallel processing, but it is not working. It seems the multiprocessing implementation is repeating tasks instead of doing parallel processing. Not sure what is wrong in the code. Can someone please help with this? Thank you so much in advance for your help.
Code:
from datetime import datetime, timedelta
import commands
import os
import string
import sys
import multiprocessing

def ipauth(slave_list, static_ip_list):
    file_record = open('/home/access/top10_domain_accessed/logs/combined_log.txt', 'a')
    count = 1
    while (count <= 30):
        Nth_days = datetime.now() - timedelta(days=count)
        date = Nth_days.strftime("%Y%m%d")
        yr_month = Nth_days.strftime("%Y/%m")
        file_name = 'local2' + '.' + date
        with open(slave_list) as file:
            for line in file:
                string = line.split()
                slave = string[0]
                proxy = string[1]
                log_path = "/LOGS/%s/%s" % (slave, yr_month)
                try:
                    os.path.exists(log_path)
                    file_read = os.path.join(log_path, file_name)
                    with open(file_read) as log:
                        for log_line in log:
                            log_line = log_line.strip()
                            if proxy in log_line:
                                file_record.write(log_line + '\n')
                except IOError:
                    pass
        count = count + 1
    file_log = open('/home/access/top10_domain_accessed/logs/ipauth_logs.txt', 'a')
    with open(static_ip_list) as ip:
        for line in ip:
            with open('/home/access/top10_domain_accessed/logs/combined_log.txt', 'r') as f:
                for content in f:
                    log_split = content.split()
                    client_ip = log_split[7]
                    if client_ip in line:
                        content = str(content).strip()
                        file_log.write(content + '\n')
    return

if __name__ == '__main__':
    slave_list = sys.argv[1]
    static_ip_list = sys.argv[2]
    jobs = []
    for i in range(5):
        p = multiprocessing.Process(target=ipauth, args=(slave_list, static_ip_list))
        jobs.append(p)
        p.start()
        p.join()
UPDATE AFTER CONVERSATION WITH OP, PLEASE SEE COMMENTS
My take: Split the file into smaller chunks and use a process pool to work on those chunks:
import multiprocessing

def chunk_of_lines(fp, n):
    # read n lines from the file
    # then yield them
    pass

def process(lines, static_ip_list):
    pass  # do stuff with one chunk of lines

p = multiprocessing.Pool()
fp = open(slave_list)
for f in chunk_of_lines(fp, 10):
    p.apply_async(process, [f, static_ip_list])
p.close()
p.join()  # Wait for all child processes to close.
There are many ways to implement the chunk_of_lines method: you could iterate over the file's lines using a simple for loop, or do something more advanced like calling fp.read().
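For instance, a possible chunk_of_lines built on itertools.islice (one way to fill in the stub above, not the only one):

from itertools import islice

def chunk_of_lines(fp, n):
    # Yield successive lists of at most n lines until the file is exhausted.
    while True:
        chunk = list(islice(fp, n))
        if not chunk:
            break
        yield chunk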

Trying to parallelize a for loop, which calls a function with arguments

I'm new to python, and especially new to multiprocessing/multithreading. I have trouble reading the documentation, or finding a sufficiently similar example to work off of.
The part that I am trying to divide among multiple cores is marked with *** below; the rest is there for context. There are three functions that are defined elsewhere in the code: NextFunction(), QualFunction(), and PrintFunction(). I don't think what they do is critical to parallelizing this code, so I did not include their definitions.
Can you help me parallelize this?
So far, I've looked at
https://docs.python.org/2/library/multiprocessing.html
Python Multiprocessing a for loop
and I've tried the equivalents for multithreading, and I've tried ipython.parallel as well.
The code is intended to pull data from a file, process it through a few functions and print it, checking for various conditions along the way.
The code looks like:
def main(arg, obj1Name, obj2Name):
    global dblen
    records = fasta(refName)
    for i, r in enumerate(records):
        s = r.fastasequence
        idnt = s.name.split()[0]
        reference[idnt] = s.seq
        names[i] = idnt
        dblen += len(s.seq)
        if taxNm == None: taxid[idnt] = GetTaxId(idnt).strip()
    records.close()
    print >> stderr, "Read it"
    # read the taxids
    if taxNm != None:
        file = open(taxNm, "r")
        for line in file:
            idnt, tax = line.strip().split()
            taxid[idnt] = tax
        file.close()
    File1 = pysam.Samfile(obj1Name, "rb")
    File2 = pysam.Samfile(obj2Name, "rb")
    ***for obj1s, obj2s in NextFunction(File1, File2):
        qobj1 = []
        qobj2 = []
        lobj1s = list(QualFunction(obj1s))
        lobj2s = list(QualFunction(obj2s))
        for obj1, ftrs1 in lobj1s:
            for obj2, ftrs2 in lobj2s:
                if (obj1.tid == obj2.tid):
                    qobj1.append((obj1, ftrs1))
                    qobj2.append((obj2, ftrs2))
        for obj, ftrs in qobj1:
            PrintFunction(obj, ftrs, "1")
        for obj, ftrs in qobj2:
            PrintFunction(obj, ftrs, "2")***
    File1.close()
    File2.close()
And is called by
if __name__ == "__main__":
etc
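For reference, the generic shape this kind of parallelization usually takes is to make the starred loop body a plain function of one pair, then map the pairs over a multiprocessing.Pool. The following is only a sketch of that pattern with stand-in data, not a drop-in answer; in particular, the real objects from pysam may not survive pickling to worker processes, which is one of the usual obstacles here.

import multiprocessing

def handle_pair(pair):
    # Stand-in for the starred loop body: take one (obj1s, obj2s) pair,
    # do the pairing/filtering work, and return the result to the parent
    # instead of printing from inside the worker.
    obj1s, obj2s = pair
    return len(obj1s) + len(obj2s)  # placeholder computation

if __name__ == '__main__':
    pairs = [([1, 2], [3]), ([4], [5, 6])]  # stand-in for NextFunction(File1, File2)
    pool = multiprocessing.Pool()
    for result in pool.imap(handle_pair, pairs):
        print(result)
    pool.close()
    pool.join()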

Need to generate a new text file and save it every time I run a script in python

I currently have a program that appends to an already existing file called "ConcentrationData.txt". However, I would like to create a new text file every time the program is run, preferably with a file name that has the date and time. This is what my current script looks like:
def measureSample(self):
    sys.stdout.flush()
    freqD1, trandD1, absoD1 = dev.getMeasurement(LED_TO_COLOR='D1')
    freqD2, trandD2, absoD2 = dev.getMeasurement(LED_TO_COLOR='D2')
    absoDiff = absoD1 - absoD2
    Coeff = 1
    Conc = absoDiff / Coeff
    Conc3SD = '{Value:1.{digits}f}'.format(Value=Conc, digits=3)
    self.textEdit.clear()
    self.textEdit.setText('Concentration is {0}'.format(Conc3SD))
    timeStr = time.strftime('%m-%d-%Y %H:%M:%S %Z')
    outFile = open('ConcentrationData.txt', 'a')
    outFile.write('{0} || Concentration: {1}'.format(timeStr, Conc3SD))
    outFile.close()
How would I go about doing that?
(Also, I'm pretty new to python so I'm sorry if this sounds like a silly question).
You can do something along the lines of the following:
class my_class:
    _data_fd = None

    def __init__(self, create, filename):
        if create:
            self._data_fd = open(filename, 'w')

    def __del__(self):
        if self._data_fd is not None:
            self._data_fd.close()

    def measureSample(self):
        ## do something here
        outFile = self._data_fd
        outFile.write('{0} || Concentration: {1}'.format(timeStr, Conc3SD))

if __name__ == '__main__':
    timeStr = time.strftime('%m-%d-%Y_%H_%M_%S_%Z')  # use underscores instead of spaces
    filename = "{0}.{1}".format("Data.txt", timeStr)
    imy_class = my_class(1, filename)
    imy_class.measureSample()
    imy_class.measureSample()  ## call multiple times; the fd remains open for the lifetime of the object
    del imy_class  ### the file closes now and you will have multiple lines of data
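If the class machinery is more than you need, the core of the question (a fresh file per run, named with the date and time) only requires building the filename once at startup. A minimal sketch along the same lines, with a hypothetical base name:

import time

# Build a per-run filename such as 'ConcentrationData_06-01-2016_10-30-55.txt'.
# Underscores and dashes instead of ':' keep the name valid on Windows too.
runStamp = time.strftime('%m-%d-%Y_%H-%M-%S')
outName = 'ConcentrationData_{0}.txt'.format(runStamp)

outFile = open(outName, 'w')  # 'w' creates the new file for this run
outFile.write('run started at {0}\n'.format(runStamp))
outFile.close()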

Python reload textfile into string every x seconds in a loop

I am trying to read a text file into a string, do something with the string, then every X seconds
re-read the text file (in case it has changed) to update the string and do the same thing again, over and over in a loop, without spawning an infinite number of processes.
So, something like:
in an infinite loop
open 'MyTextFile' and read it into a string
do stuff with the string of text it reads from the file
close the file (if I need to, to allow another script to write to it)
wait X seconds before clearing the string and re-reading the
same 'MyTextFile' file to update the string and do it all again
(in an infinite loop until I tell it to stop)
What would be a good, sturdy (reliable to run for a long time) way to do that?
Thanks!
import os
import time

myDataFile = "MyData.txt"
data = ""

def ReadFile(data):
    # only need to read it if data has changed
    # detect 'modified' date of myDataFile and only PrintToPrinter IF it has changed
    # and if it has NOT changed, wait 10 seconds and check again
    # IF CHANGED
    with open(myDataFile, "r") as myFile:
        data = myFile.read().replace('\n', ' ')
    ShowOnScreen(data)
    PrintToPrinter(data)
    # ELSE
    # data has not changed..
    # no need to print it so I shouldn't send back to PrintToPrinter
    # but I still want to ShowOnScreen(data) on screen
    # and keep the program running to check for changes
    ShowOnScreen(data)
    time.sleep(10)
    ReadFile(data)

def ShowOnScreen(data):
    print(time.strftime('%X') + ' ' + data)  # current time plus the data

def PrintToPrinter(data):
    # send DateTimeStamp+data to printer here
    # go back to checking for changes
    ReadFile(data)

# kick off the program to start looking for changes
# data starts out as "" so it should always read the first time
# and send to printer and screen
ReadFile(data)
This can be easily done using time.sleep from the time module. Sample code is pasted below:
import time

def file_handler(filename):
    with open(filename) as fh:
        line = fh.read()
        print line
        print "Length of string: %s" % len(line)

while True:
    file_handler("test.txt")
    time.sleep(10)
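The comments in the question also ask about only re-processing when the file has actually changed; polling os.path.getmtime is one way to do that. A small sketch, assuming the same test.txt:

import os
import time

filename = "test.txt"
last_mtime = None

while True:
    mtime = os.path.getmtime(filename)  # raises OSError if the file is missing
    if mtime != last_mtime:
        last_mtime = mtime
        with open(filename) as fh:
            data = fh.read()
        print('file changed, now %d characters' % len(data))
    time.sleep(10)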
Ended up doing something like this
#!/usr/bin/env python
import sys
from time import sleep
import datetime
import time
# probably should not import both time and datetime

MyFile = "MyFile.txt"
MyText = ""
MyDate = ""

def GetDateTime():
    MyDate = time.strftime("%x %I:%M %p")
    return MyDate + " "

def ReadFile(MyText):
    with open(MyFile, "r") as myfile:
        MyText2 = myfile.read().replace('\n', ' ')
    CurDateTime = GetDateTime()
    if (MyText == MyText2):
        # file hasn't changed...print to screen only
        print CurDateTime
        sleep(5)
        ReadFile(MyText2)
    else:
        # print to screen and printer
        print CurDateTime + MyText2
        # send CurDateTime +MyText2 to printer
        sleep(5)
        ReadFile(MyText2)

ReadFile(MyText)
Hope this helps somebody
