Python looping through files and saving content to dict

I have the following code:
import os
import json
import ipaddress
iplist = []
ipiflist = []
mydict = {}
for filename in os.listdir('data/'):
    with open(os.path.join('data/', filename), 'r') as f:
        data = json.load(f)
        mydict.update(data)
        print(mydict)
In the data directory there are several JSON files that I open in this loop.
I update the dict in every iteration, which is why I get the following output:
{'ipif_1001': '10.10.160.129', 'ipif_1002': '10.10.160.142', 'ipif_1003': '10.10.160.169', 'ipif_1004': '10.10.160.173', 'ipif_3334': '10.10.160.194', 'IpIf3337': '10.10.160.126'}
{'ipif_1001': '10.10.160.129', 'ipif_1002': '10.10.160.142', 'ipif_1003': '10.10.160.170', 'ipif_1004': '10.10.160.174', 'ipif_3334': '10.10.160.194', 'IpIf3337': '10.10.160.126', 'ipif_1005': '10.10.160.178', 'ipif_1006': '10.10.160.182'}
{'ipif_1001': '10.10.160.129', 'ipif_1002': '10.10.160.142', 'ipif_1003': '10.10.160.170', 'ipif_1004': '10.10.160.174', 'ipif_3334': '10.10.160.194', 'IpIf3337': '10.10.160.126', 'ipif_1005': '10.10.160.178', 'ipif_1006': '10.10.160.182', 'IpIf1001': '10.10.160.138', 'IpIf1002': '10.10.160.141', 'IpIf1003': '10.10.160.153', 'IpIf1006': '10.10.160.181', 'IpIf_CPEDCN': '10.10.160.241', 'IpIf_DCNMgt': '10.10.191.253', 'ipif1164': '10.10.160.166', 'IpIf1010': '10.10.170.1'}
I only need the merged output from the last iteration. How can I access only that?
Thanks for your help

The for loop in Python has an else clause, which is only executed when the loop finishes without hitting a break. You can print your last result there:
for filename in os.listdir('data/'):
    with open(os.path.join('data/', filename), 'r') as f:
        data = json.load(f)
        mydict.update(data)
else:
    print(mydict)
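Since this loop contains no break, the else clause always runs, so it behaves the same as simply printing once the loop has finished; a minimal sketch of that equivalent variant:
import json
import os

mydict = {}
for filename in os.listdir('data/'):
    with open(os.path.join('data/', filename), 'r') as f:
        mydict.update(json.load(f))
print(mydict)  # executed once, after all files have been merged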

import os
import json
import ipaddress

iplist = []
ipiflist = []
mydict = {}

files = os.listdir('data/')
for filename in files:
    with open(os.path.join('data/', filename), 'r') as f:
        data = json.load(f)
        mydict.update(data)
        if filename == files[-1]:  # check whether the current filename is the last one in the directory listing
            print(mydict)
Try it like this


How to use elements in list by order

My goal is to convert multiple CSV files in a folder into JSON.
First, I needed to list my CSV files:
for file in os.listdir("C:/Users/folder_to_csv"):
    filename = os.fsdecode(file)
    if filename.endswith(".csv"):
        # check if csv files are listed correctly
        print(os.path.join("C:/Users/folder_to_csv", filename))
With this, I was able to list the CSV files in that folder.
Result:
C:/Users/folder_to_csv\file_1.csv
C:/Users/folder_to_csv\file_2.csv
C:/Users/folder_to_csv\file_3.csv
Then, I wanted to convert all of the CSV files in 'csvlist' to jsonObj; however, for some reason, my code only uses the first file (C:/Users/folder_to_csv\file_1.csv).
This is what I have tried so far:
import json
import csv
import requests
import threading
import os

for file in os.listdir("C:/Users/folder_to_csv"):
    filename = os.fsdecode(file)
    if filename.endswith(".csv"):
        csvlist = os.path.join("C:/Users/folder_to_csv", filename)

data = {}

def main():
    # loop csv list so my codes can read all csv files
    length = len(csvlist)
    for i in range(length):
        i += 1
        path = csvlist
        # switch csv to json
        with open(path, mode='r') as f:
            reader = csv.DictReader(f)
            processdata = [row for row in reader]
            dlist = processdata
            jsonObj = json.dumps(dlist)
            print(jsonObj)

main()
In the initial loop, you keep redefining the csvlist variable. I suppose you want it to be a list? Then just create an initial empty list and append to it instead of redefining it:
csvlist = []
...
csvlist.append(os.path.join("C:/Users/folder_to_csv", filename))
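Put together, a minimal sketch of the corrected flow might look like this (the folder path and the CSV-to-JSON conversion are taken from the question; the conversion step is simplified):
import csv
import json
import os

folder = "C:/Users/folder_to_csv"  # folder path from the question

# collect every CSV path instead of overwriting a single variable
csvlist = []
for file in os.listdir(folder):
    filename = os.fsdecode(file)
    if filename.endswith(".csv"):
        csvlist.append(os.path.join(folder, filename))

# convert each collected CSV file to a JSON string
for path in csvlist:
    with open(path, mode='r') as f:
        rows = list(csv.DictReader(f))
    print(json.dumps(rows))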

Python - changing content of .txt files from folder and saving in new folder

I need to change some key words in multiple .txt files, using a dictionary structure for this, and then save the changed files in a new location. I wrote the code attached below, but when I run it, it keeps running forever, and when I break it there is only one empty file created.
import os
import os.path
from pathlib import Path

dir_path = Path("C:\\Users\\myuser\\Documents\\scripts_new")

# loading pair of words from txt file into dictionary
myfile = open("C:\\Users\\myuser\\Desktop\\Python\\dictionary.txt")
data_dict = {}
for line in myfile:
    k, v = line.strip().split(':')
    data_dict[k.strip()] = v.strip()
myfile.close()

# Get the list of all files and directories
path_dir = "C:\\Users\\myuser\\Documents\\scripts"

# iterate over files in that directory
for filename in os.listdir(path_dir):
    f = os.path.join(path_dir, filename)
    name = os.path.join(filename)
    text_file = open(f)
    # read whole file to a string
    sample_string = text_file.read()
    # Iterate over all key-value pairs in dictionary
    for key, value in data_dict.items():
        # Replace key character with value character in string
        sample_string = sample_string.replace(key, value)
    with open(os.path.join(dir_path, name), "w") as file1:
        toFile = input(sample_string)
        file1.write(toFile)
I have found a solution with a slightly different approach. Maybe this code will be useful for someone:
import os

# loading pair of words from txt file into dictionary
myfile = open("C:\\Users\\user\\Desktop\\Python\\dictionary.txt")
data_dict = {}
for line in myfile:
    k, v = line.strip().split(':')
    data_dict[k.strip()] = v.strip()
myfile.close()

sourcepath = os.listdir("C:\\Users\\user\\Documents\\scripts\\")
for file in sourcepath:
    input_file = "C:\\Users\\user\\Documents\\scripts\\" + file
    print('Conversion is ongoing for: ' + input_file)
    with open(input_file, 'r') as input_file:
        filedata = input_file.read()
    destination_path = "C:\\Users\\user\\Documents\\scripts_new\\" + file
    # Iterate over all key-value pairs in dictionary
    for key, value in data_dict.items():
        filedata = filedata.replace(key, value)
    with open(destination_path, 'w') as file:
        file.write(filedata)
Hmmm... I think your problem might actually be the use of the line
toFile = input(sample_string)
as that will halt the program while it awaits user input.
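A minimal fix, keeping the rest of the code as it is, would be to write the replaced string directly instead of prompting for input:
with open(os.path.join(dir_path, name), "w") as file1:
    file1.write(sample_string)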
Anyway, it could probably do with a little organisation into functions. Even this below is a bit... meh.
import os
import os.path
from pathlib import Path

dir_path = Path("C:\\Users\\myuser\\Documents\\scripts_new")

# -----------------------------------------------------------
def load_file(fileIn):
    # loading pair of words from txt file into dictionary
    with open(fileIn) as myfile:
        data_dict = {}
        for line in myfile:
            k, v = line.strip().split(':')
            data_dict[k.strip()] = v.strip()
    return data_dict

# -----------------------------------------------------------
def work_all_files(starting_dir, moved_dir, data_dict):
    # Iterate over files within the dir - note non recursive
    for filename in os.listdir(starting_dir):
        f = os.path.join(starting_dir, filename)
        with open(f, 'r') as f1:
            # read whole file to a string
            sample_string = f1.read()
        new_string = replace_strings(sample_string, data_dict)
        with open(os.path.join(moved_dir, filename), "w") as file1:
            file1.write(new_string)

# -----------------------------------------------------------
def replace_strings(sample_string, data_dict):
    # Iterate over all key-value pairs in dictionary
    # and if they exist in sample_string, replace them
    for key, value in data_dict.items():
        # Replace key character with value character in string
        sample_string = sample_string.replace(key, value)
    return sample_string

# -----------------------------------------------------------
if __name__ == "__main__":
    # Get the dict-val pairings first
    data_dict = load_file("C:\\Users\\myuser\\Desktop\\Python\\dictionary.txt")
    # Then run over all the files within dir
    work_all_files("C:\\Users\\myuser\\Documents\\scripts", "C:\\Users\\myuser\\Documents\\new_scripts", data_dict)
We could have housed all this in a class and then transported a few variables around using the instance (i.e. "self") - would have been cleaner. But first step is learning to break things into functions.

Store data in diff. JSON files through Loop using Python

I'm using an API call through which I get data in every iteration, but the issue is that I'm confused about how I can save the data to a separate JSON file for every iteration.
language : Python
Version : 3.9
import glob
import os

import virustotal_python
from pprint import pprint

folder_path = 'C:/Users/E-TIME/PycharmProjects/FYP script/263 Hascodes in Txt Format'
count = 0
for file in glob.glob(os.path.join(folder_path, '*.txt')):
    with open(file, 'r') as f:
        lines = f.read()
        l = lines.split(" ")
        l = l[0].split('\n')
    for file_id in range(0, 3):
        with virustotal_python.Virustotal(
                "ab8421085f362f075cc88cb1468534253239be0bc482da052d8785d422aaabd7") as vtotal:
            resp = vtotal.request(f"files/{l[file_id]}/behaviours")
            data = resp.data
            pprint(data)
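One way to keep each iteration's result, sketched under the assumption that resp.data is JSON-serializable; the helper name, output folder, and per-hash file naming below are illustrative, not part of the original code:
import json
import os

def save_behaviour_report(file_hash, data, out_dir='behaviour_reports'):
    # Write one API response to its own JSON file; out_dir is a hypothetical folder name.
    os.makedirs(out_dir, exist_ok=True)
    out_path = os.path.join(out_dir, f"{file_hash}.json")
    with open(out_path, 'w') as out_f:
        json.dump(data, out_f, indent=2)

# inside the inner loop of the question, after data = resp.data:
# save_behaviour_report(l[file_id], data)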

Write a CSV from JSON, importing only given keys

I have JSON files reporting different values, and I want to import only some keys into a CSV.
I have tried two approaches, but both give me problems.
At first, I tried this:
import os, json
import glob
import csv

# Place your JSON data in a directory named 'data/'
src = "MYPATH"
data = []
json_pattern = os.path.join(src, '*.json')
# only json
files = glob.glob(json_pattern, recursive=True)

# Loop through files
for single_file in files:
    with open(single_file, 'r') as f:
        json_file = json.load(f)
        try:
            data.append([
                json_file['name1'],
                json_file['name2'],
                json_file['name3'],
                json_file['name4'],
            ])
        except KeyError:
            continue

# Add headers
data.insert(0, ['title_1', 'title_2', 'title_3'])

# Export to CSV.
# Add the date to the file name to avoid overwriting it each time.
csv_filename = 'name.csv'
with open((src + csv_filename), "w", newline="") as f:
    writer = csv.writer(f)
    writer.writerows(data)
In this way, unfortunately, if a key is not included, the code skips the file altogether, while I want it to skip only the missing key.
So I tried this instead:
import os, json
import glob
import csv

# Place your JSON data in a directory named 'data/'
src = "MY_PATH"
data = []
json_pattern = os.path.join(src, '*.json')
# Change the glob if you want to only look through files with specific names
files = glob.glob(json_pattern, recursive=True)

# Loop through files
col_name = ['name1', 'name2', 'name4']
for single_file in files:
    with open(single_file, 'r') as f:
        json_file = json.load(f)
        for key in col_name:
            try:
                data.append([json_file[key]])
            except KeyError:
                continue

# Add headers
data.insert(0, ['title_1', 'title_2', 'title_3'])

# Export to CSV.
# Add the date to the file name to avoid overwriting it each time.
csv_filename = 'name.csv'
with open((src + csv_filename), "w", newline="") as f:
    writer = csv.writer(f)
    writer.writerows(data)
But in this case, each value becomes a new row in the CSV, while I want the values from each JSON file on a single row.
I am not an expert and I really don't know how to combine these two.
Can someone help me out?
Thanks!
If I understand what you're trying to do correctly, why not just do
# Loop through files
for single_file in files:
    with open(single_file, 'r') as f:
        json_file = json.load(f)
        data.append([
            json_file.get('name1', ''),
            json_file.get('name2', ''),
            json_file.get('name3', ''),
            json_file.get('name4', '')
        ])
By using .get() you can specify the default value in case a key isn't found.
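Put together with the CSV export from the question, a minimal sketch would look like this (the path and key names are the placeholders from the question, and a fourth header title is assumed so the header matches the four keys):
import csv
import glob
import json
import os

src = "MYPATH"  # placeholder path from the question
files = glob.glob(os.path.join(src, '*.json'))

# header row first, then one row per JSON file
data = [['title_1', 'title_2', 'title_3', 'title_4']]
for single_file in files:
    with open(single_file, 'r') as f:
        json_file = json.load(f)
    # missing keys become empty strings instead of skipping the file
    data.append([json_file.get(k, '') for k in ('name1', 'name2', 'name3', 'name4')])

with open(os.path.join(src, 'name.csv'), "w", newline="") as f:
    csv.writer(f).writerows(data)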

Modifying JSON key values in Python

I am trying to load a JSON file and change specific key values, then save the updated entries to a new file. This JSON file has many entries with the same format. This is my furthest attempt before coming here; however, it does not save the new values.
What am I missing?
#!/usr/bin/python
import simplejson as json
import names

in_file = open('Names.json', 'r')
out_file = open('Names_new.json', 'w')
data_file = in_file.read()
data = json.loads(data_file)

for x in data:
    nickname = x['nickname']
    newname = names.get_first_name()
    nickname = newname

out_file.write(json.dumps(data))
out_file.close()
The problem is that you didn't change x['nickname'] when you wanted to assign newname to it. Instead, you only modified the variable nickname.
Try assigning to x['nickname'] directly:
for x in data:
    x['nickname'] = names.get_first_name()
You are just dumping the old JSON data into a new file without modifying its contents.
Instead, you should update the data with newname before writing it out:
#!/usr/bin/python
import simplejson as json
import names

in_file = open('Names.json', 'r')
out_file = open('Names_new.json', 'w')
data_file = in_file.read()
data = json.loads(data_file)

for x in data:
    newname = names.get_first_name()
    x['nickname'] = newname

out_file.write(json.dumps(data))
out_file.close()
