append dictionary, save to a file and increment dictionary regularly - python

I create a dictionary and save it to a file using json. The code takes input and updates the dictionary regularly, but unfortunately I can't get it to write the dictionary properly.
Following is the code that I have written:
import os, sys, pickle, re, json
from optparse import OptionParser

parser = OptionParser("Store Daily Intakes \n python [Options] <-h help>")
parser.add_option("-n", "--ndays", dest="ndays", action="store", type="int", help="Input the day")
parser.add_option("-m", "--morning", dest="morning", action="store", type="string", help="Input the morning intake format <Banana-1pc,Bread-1pc,CottageChees-2tbs>")
parser.add_option("-l", "--lunch", dest="lunch", action="store", type="string", help="Input the Lunch intake format <Rice-2tbs,Roti-1pc,ChickenCurry-2tbs,Dal-2tbs>")
parser.add_option("-a", "--afternoon", dest="afternoon", action="store", type="string", help="Input the afternoon intake format <Cornflakes-2tbs,Banana-1pc>")
parser.add_option("-d", "--dinner", dest="dinner", action="store", type="string", help="Input the dinner intake format <Pasta-20gms, Cheese-2slice>")
(options, args) = parser.parse_args()

if options.ndays is None or options.morning is None or options.lunch is None or options.afternoon is None or options.dinner is None:
    print parser.print_help()
    exit(-1)

if os.path.isfile("./DailyInTakeFile.json") is True:
    jout = file('./DailyInTakeFile.json', 'r')  # read mode
    CurDct = json.load(jout)
    print CurDct
    DailyInTake = dict()
    DailyInTake["%d" % options.ndays] = {}
    din = DailyInTake["%s" % options.ndays]
    din['Morning'] = options.morning
    din['Lunch'] = options.lunch
    din['Afternoon'] = options.afternoon
    din['Dinner'] = options.dinner
    saved = sys.stdout
    ofile = file('DailyInTakeFile.json', 'a')  # append mode
    for idx in CurDct.keys():
        if int(idx) == options.ndays:
            print idx, options.ndays
            print "The Intake for day # %d exists" % options.ndays
            print "Are you sure you want to overwrite: Type [yes/no]"
            lett = sys.stdin.read()
            if "yes" in lett:
                CurDct[idx]['Morning'] = options.morning
                CurDct[idx]['Lunch'] = options.lunch
                CurDct[idx]['Afternoon'] = options.afternoon
                CurDct[idx]['Dinner'] = options.dinner
                ofile.close()
                sys.exit("Exiting after updating day # %d" % options.ndays)
            else:
                ofile.close()
                sys.exit("Exiting without update")
        else:
            sys.stdout = ofile
            print json.dumps(DailyInTake)
            print ","
            sys.stdout = saved
            ofile.close()
else:
    DailyInTake = dict()
    DailyInTake["%d" % options.ndays] = {}
    din = DailyInTake["%s" % options.ndays]
    din['Morning'] = options.morning
    din['Lunch'] = options.lunch
    din['Afternoon'] = options.afternoon
    din['Dinner'] = options.dinner
    #print DailyInTake
    saved = sys.stdout
    ofile = file('DailyInTakeFile.json', 'a')  # append mode
    sys.stdout = ofile
    print json.dumps(DailyInTake)
    print ","
    sys.stdout = saved
    ofile.close()

from datetime import date, timedelta
from subprocess import call
call("cp DailyInTakeFile.json DailyInTakeFile.json.%s" % str(date.today()), shell=True)
The output json file from this code looks like the following, for example:
{"1": {"Lunch": "l3", "Dinner": "d3", "Afternoon": "a3", "Morning": "m3"}}
{"2": {"Lunch": "l3", "Dinner": "d3", "Afternoon": "a3", "Morning": "m3"}}
As you can see, it is just adding a single dictionary each time rather than appending to the first one created. I just can't figure it out anymore. Any help will be appreciated.
UPDATE WITH CODE THAT MAINLY CHANGED
saved = sys.stdout
for idx in CurDct.keys():
    if int(idx) == options.ndays:
        print idx, options.ndays
        print "The Intake for day # %d exists" % options.ndays
        print "Are you sure you want to overwrite: Type [yes/no]"
        lett = sys.stdin.read()
        if "yes" in lett:
            ofile = file('DailyInTakeFile.json', 'w')  # write mode
            sys.stdout = ofile
            CurDct.update(DailyInTake)
            print json.dumps(CurDct)
            sys.stdout = saved
            ofile.close()
            sys.exit("Exiting after updating day # %d" % options.ndays)
        else:
            sys.exit("Exiting without update")
    else:
        ofile = file('DailyInTakeFile.json', 'w')  # write mode
        sys.stdout = ofile
        CurDct.update(DailyInTake)
        print json.dumps(CurDct)
        sys.stdout = saved
        ofile.close()

According to the code, you create a new dictionary every time (DailyInTake = dict()) and never append to the old one already in the file, so printing to the file just appends a new dictionary.
My suggestion would be to add the new day's entry to CurDct as CurDct[index] = DailyInTake[index], then dump the whole dictionary back to the file. You can open the file for writing rather than appending.
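A minimal sketch of that approach, assuming the same DailyInTakeFile.json layout as in the question (the save_day helper and its parameters are only illustrative, not part of the original script):

import json, os

def save_day(ndays, morning, lunch, afternoon, dinner, path='DailyInTakeFile.json'):
    # Load the existing dictionary, or start fresh if the file is missing.
    if os.path.isfile(path):
        with open(path, 'r') as f:
            cur = json.load(f)
    else:
        cur = {}
    # Add or overwrite the entry for this day, then write the whole dict back.
    cur[str(ndays)] = {'Morning': morning, 'Lunch': lunch,
                       'Afternoon': afternoon, 'Dinner': dinner}
    with open(path, 'w') as f:  # write mode, not append
        json.dump(cur, f)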

Using JSON serialisation as a mutable data storage backend seems like a rather odd solution. Without looking at your code in detail, I suggest using one of the solutions that are meant to be used this way. The most suitable one for this case seems to be the shelve module.
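For illustration, a minimal sketch using shelve (the DailyInTake.db filename is just a placeholder):

import shelve

db = shelve.open('DailyInTake.db')  # persistent, dict-like storage on disk
db['1'] = {'Morning': 'm1', 'Lunch': 'l1',
           'Afternoon': 'a1', 'Dinner': 'd1'}  # add or overwrite a day
db.sync()  # flush changes to disk
print(db.get('1'))
db.close()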

Related

Has anyone gotten Dpapi and Roaming Profiles to work?

According to Microsoft, DPAPI should be able to encrypt data on one machine, and decrypt it on another:
See: https://support.microsoft.com/en-us/topic/bf374083-626f-3446-2a9d-3f6077723a60#bkmk_6
When I am logged into a domain controller and encrypt a file, I expect to be able to log out, transfer the file, and decrypt it on another machine logged in as the same user.
However, I get this error:
error: (-2146893813, 'CryptProtectData', 'Key not valid for use in specified state.')
This implies that the "roaming" didn't work. I'm assuming there are some group policy settings I need to set to get those creds to roam properly.
Also, if there's a better way to do this (some other api to use the logged-in user's existing creds), I'm ok with that.
Here's the script I use to test:
import argparse
import os
import sys
from win32crypt import CryptProtectData, CryptUnprotectData

def dpapi_encrypt(fin, fout):
    dat = fin.read()
    fout.write(CryptProtectData(dat))

def dpapi_decrypt(fin, fout):
    (_descr, dat) = CryptUnprotectData(fin.read())
    if dat and dat[-1] == 0:
        dat = dat[:-1]
    fout.write(dat)

def do_fileop(file, op):
    if file == "-":
        fin = sys.stdin.buffer
        fout = sys.stdout.buffer
        op(fin, fout)
    else:
        with open(file, "rb") as fin:
            tmp = file + ".dpapi-enc"
            with open(tmp, "wb") as fout:
                op(fin, fout)
        os.replace(tmp, file)

def encrypt_file(file):
    do_fileop(file, dpapi_encrypt)

def decrypt_file(file):
    do_fileop(file, dpapi_decrypt)

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("file")
    parser.add_argument("--encrypt", "-e", action="store_true")
    parser.add_argument("--decrypt", "-d", action="store_true")
    args = parser.parse_args()
    if args.encrypt:
        encrypt_file(args.file)
    elif args.decrypt:
        decrypt_file(args.file)
    else:
        print("error: specify --encrypt or --decrypt", file=sys.stdout)

if __name__ == "__main__":
    main()

How do I pass arguments from command line to the python script to read and update values of certain keys present in a json file

I have certain data in a json file (say, example.json),
example.json
{
  "name": "Williams",
  "working": false,
  "college": ["NYU", "SU", "OU"],
  "NYU": {
    "student": true,
    "professor": false,
    "years": {
      "fresher": "1",
      "sophomore": "2",
      "final": "3"
    }
  }
}
I wish to write a script such that I can give the arguments on the command line, i.e. suppose the script is saved in a file script.py, then:
If I enter $ python3 script.py --get name --get NYU.student then it outputs
name=Williams
NYU.student=True
If I enter $ python3 script.py --set name=Tom --set NYU.student=False
then it updates the name and NYU.student keys in the dictionary to Tom and False and outputs name=Tom and NYU.student=False on the command line.
I have tried the following code for the python script (i.e. script.py)
script.py
import json
import pprint
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--get", help="first command")
    parser.add_argument("--set", help="second command")
    args = parser.parse_args()
    with open('example.json', 'r') as read_file:
        data = json.load(read_file)
    if args.set == None:
        key = ' '.join(args.get[:])
        path = key.split('.')
        now = data
        for k in path:
            if k in now:
                now = now[k]
            else:
                print('Error: Invalid Key')
        print(now)
    elif args.get == Null:
        key, value = ' '.join(args.set[:]).split('=')
        path = key.split('.')
        now = data
        for k in path[:-1]:
            if k in now:
                now = now[k]
            else:
                print('Error: Invalid Key')
        now[path[-1]] = value
        with open('example.json', 'w') as write_file:  # To write the updated data back to the same file
            json.dump(data, write_file, indent=2)
However, my script is not working as I expect it to. Kindly help me with the script.
Your code has the following issues:
When joining the argument values for --get and --set, you use a space. This leads to the 'Error: Invalid Key' output. Removing the space solves the issue:
key = ''.join(arg[:])
You defined the arguments to accept only one value, not multiple, so even if you pass multiple --get or --set options the script only gets one value. Adding action="append" to the two add_argument calls solves the issue:
parser.add_argument("--get", help="first command", action="append")
parser.add_argument("--set", help="second command", action="append")
Full code:
import json
import pprint
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--get", help="first command", action="append")
    parser.add_argument("--set", help="second command", action="append")
    args = parser.parse_args()
    try:
        with open('example.json', 'r') as read_file:
            data = json.load(read_file)
    except IOError:
        print("ERROR: File not found")
        exit()
    if args.set == None:
        for arg in args.get:
            key = ''.join(arg[:])
            path = key.split('.')
            now = data
            for k in path:
                if k in now:
                    now = now[k]
                else:
                    print('Error: Invalid Key')
            print(f"{arg} = {now}")
    elif args.get == None:
        for arg in args.set:
            key, value = ''.join(arg[:]).split('=')
            path = key.split('.')
            now = data
            for k in path[:-1]:
                if k in now:
                    now = now[k]
                else:
                    print('Error: Invalid Key')
            print(f"{arg}")
            now[path[-1]] = value
        with open('example.json', 'w') as write_file:  # To write the updated data back to the same file
            json.dump(data, write_file, indent=2)
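For example, assuming example.json contains the data shown above, a run of this full code might look roughly like:

$ python3 script.py --get name --get NYU.student
name = Williams
NYU.student = True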
Here is the get part of the question; I hope that you can continue with the set part of your assignment. Good luck.
python test.py --get name NYU.student
import json
import pprint
import argparse

def match(data: dict, filter: str):
    current = data
    for f in filter.split("."):
        if f not in current:
            return False
        current = current[f]
    return current == True

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--get", nargs="*", help="first command")
    args = parser.parse_args()
    with open('example.json', 'r') as f:
        data = json.loads(f.read())
    if args.get is not None and len(args.get) == 2:
        attr_name = args.get[0]
        if match(data, args.get[1]):
            print("{}={}".format(attr_name, data[attr_name]))
In order to pass arguments using the command line, make use of the sys module in Python 3. The sys module reads the command line arguments as a list of strings. The first element in the list is always the name of the file, and the subsequent elements are arg1, arg2, ... and so on.
I hope the following example helps to understand the usage of the sys module.
Example Command :
python filename.py 1 thisisargument2 4
The corresponding code
import sys
# Note that all the command line args will be treated as strings
# Thus type casting will be needed for proper usage
print(sys.argv[0])
print(sys.argv[1])
print(sys.argv[2])
print(sys.argv[3])
Corresponding Output
filename.py
1
thisisargument2
4
Also, please do a thorough Google search before posting a question on Stack Overflow.

How to check if JSON file is not empty and load JSON data from file in temp folder correctly?

The problem is that the process never goes through "loaded" JSON data from a file, and I do not understand why. It always goes through creating the new file each time.
import argparse
import os
import tempfile
import json

storage = argparse.ArgumentParser()
storage.add_argument("--key", help="input key's name")
storage.add_argument("--val", help="value of key", default=None)
args = storage.parse_args()
storage_path = os.path.join(tempfile.gettempdir(), 'storage.data')

with open(storage_path, 'r') as f:
    if f.seek(2) is not 2:
        data_base = json.load(f)
        print('loaded that: ', data_base)
    else:
        f.close()
        print('each time I am creating the new one')
        with open(storage_path, 'w') as f:
            data_base = {}
            f.close()

if data_base.get(args.key, 'Not found') == 'Not found':
    if args.val is not None:
        data_base.setdefault(args.key, args.val)
        with open(storage_path, 'w') as f:
            json.dump(data_base, f)
        print('dumped this: ', data_base)
There are quite a few issues with your code, i.e.:
The program crashes if the file does not exist:
with open(storage_path,'r') as f:
You open storage_path for writing but actually never write anything:
print('each time I am creating the new one')
with open(storage_path,'w') as f:
    data_base = {}
    f.close()
And actually, if you happened to have f.seek(2) == 2, the json.load(f) would also crash, since at this point you have moved the file pointer to the 3rd char, so the subsequent read in json.load() wouldn't get the whole content.
Here's a fixed version that should work AFAICT:
import argparse
import os
import tempfile
import json

storage = argparse.ArgumentParser()
storage.add_argument("--key", help="input key's name")
storage.add_argument("--val", help="value of key", default=None)
args = storage.parse_args()
storage_path = os.path.join(tempfile.gettempdir(), 'storage.data')

data_base = None
if os.path.exists(storage_path):
    with open(storage_path, 'r') as f:
        try:
            data_base = json.load(f)
            print('loaded that: ', data_base)
        except Exception as e:
            print("got %s on json.load()" % e)

if data_base is None:
    print('each time I am creating the new one')
    data_base = {}
    with open(storage_path, 'w') as f:
        json.dump(data_base, f)

# don't prevent the user from setting "Not found" as a value, it might
# be a legitimate value.
# NB: you don't check if `args.key` is actually set... maybe you should?
sentinel = object()
if data_base.get(args.key, sentinel) is sentinel:
    if args.val is not None:
        data_base[args.key] = args.val
        with open(storage_path, 'w') as f:
            json.dump(data_base, f)
        print('dumped this: ', data_base)

Read a line from a file in python

I have one file named mcelog.conf and I am reading this file in my code. Contents of the file are
no-syslog = yes # (or no to disable)
logfile = /tmp/logfile
The program will read the mcelog.conf file and check for the no-syslog tag; if no-syslog = yes then the program has to check for the logfile tag and read its value. Can anyone let me know how I can get the value /tmp/logfile?
with open('/etc/mcelog/mcelog.conf', 'r+') as fp:
    for line in fp:
        if re.search("no-syslog =", line) and re.search("= no", line):
            memoryErrors = readLogFile("/var/log/messages")
            mcelogPathFound = true
            break
        elif re.search("no-syslog =", line) and re.search("= yes", line):
            continue
        elif re.search("logfile =", line):
            memoryErrors = readLogFile(line)  # Here I want to pass the value "/tmp/logfile" but currently "logfile = /tmp/logfile" is getting passed
            mcelogPathFound = true
            break
    fp.close()
You can just split the line to get the value you want:
line.split(' = ')[1]
However, you might want to look at the documentation for the configparser module.
Change the code to:
with open('/etc/mcelog/mcelog.conf', 'r+') as fp:
    for line in fp:
        if re.search("no-syslog =", line) and re.search("= no", line):
            memoryErrors = readLogFile("/var/log/messages")
            mcelogPathFound = true
            break
        elif re.search("no-syslog =", line) and re.search("= yes", line):
            continue
        elif re.search("logfile =", line):
            memoryErrors = readLogFile(line.split("=")[1].strip())  # Passes the value "/tmp/logfile" instead of the whole line "logfile = /tmp/logfile"
            mcelogPathFound = true
            break
    fp.close()
This is because you want to read only a part of the line rather than the whole thing, so I have just split it up by the "=" sign and then stripped it to remove any blanks.
I liked the suggestion of the configparser module, so here is an example of that (Python 3)
For the given input, it will output reading /var/log/messages
import configparser, itertools

config = configparser.ConfigParser()
filename = "/tmp/mcelog.conf"

def readLogFile(filename):
    if filename:
        print("reading", filename)
    else:
        raise ValueError("unable to read file")

section = 'global'
with open(filename) as fp:
    config.read_file(itertools.chain(['[{}]'.format(section)], fp), source=filename)

no_syslog = config[section]['no-syslog']
if no_syslog == 'yes':
    logfile = "/var/log/messages"
elif no_syslog == 'no':
    logfile = config[section]['logfile']

if logfile:
    mcelogPathFound = True
    memoryErrors = readLogFile(logfile)

Search, count and add - Python

properties = ["color", "font-size", "font-family", "width", "height"]
inPath = "style.css"
outPath = "output.txt"

#Open a file for reading
file = open(inPath, 'rU')
if file:
    # read from the file
    filecontents = file.read()
    file.close()
else:
    print "Error Opening File."

#Open a file for writing
file = open(outPath, 'wb')
if file:
    for i in properties:
        search = i
        index = filecontents.find(search)
        file.write(str(index), "\n")
    file.close()
else:
    print "Error Opening File."
It seems to work, but:
It only searches for each keyword once.
It's not writing to the output file; I get "function takes exactly 1 argument".
I don't actually want it to print the index, but the number of times the keyword appears.
Many thanks
First, you want .count(search), not .find(search), if what you're looking for is the number of occurrences.
Second, .write() only takes a single parameter; if you want to write a newline, you need to concatenate it first, or call .write() twice.
Third, doing for i in properties: search = i is redundant; just use the name you want in your for loop.
for search in properties:
    cnt = filecontents.count(search)
    file.write(str(cnt) + "\n")
from itertools import imap

properties = ("color", "font-size", "font-family", "width", "height")
inPath = "style.css"
outPath = "output.txt"

try:
    #Open a file for reading
    filecontents = file(inPath).read()
except Exception as exc:
    print exc
else:
    #Open a file for writing
    with open(outPath, 'wb') as out_file:
        #for property in properties:
        #    out_string = "%s %s\n"
        #    out_file.write( out_string % (
        #        property, filecontents.count(property)))
        out_file.write('\n'.join(
            imap(str, imap(filecontents.count, properties))))
