I have multiple directories which all contain JSON files.
I know how I could read all the files in ONE directory, but not how to read them across all directories without specifying the directory names.
I played around and came up with something like this:
import json
import os

# Root directory whose sub-directories hold the JSON files.
path_to_json = 'path/to/dir/with/dirs'

# os.listdir only yields bare names one level deep; os.walk visits every
# sub-directory and gives us the directory part needed to open each file.
for root, dirs, files in os.walk(path_to_json):
    for name in files:
        if name.endswith('.json'):
            full_path = os.path.join(root, name)  # bare name alone is not openable
            with open(full_path, 'r') as myfile:
                data = myfile.read()
any help is greatly appreciated
Use os.walk with str.endswith
Ex:
path_to_json = 'path/to/dir/with/dirs'

# Full path of every *.json file anywhere under the root, in one pass.
json_files = [
    os.path.join(dirpath, name)
    for dirpath, _, names in os.walk(path_to_json)
    for name in names
    if name.endswith('.json')
]

# Read each discovered file in turn.
for json_file in json_files:
    with open(json_file, 'r') as myfile:
        data = myfile.read()
You can use the os.walk and give the top level directory as the directory_name.
import os

root = "<path-to-dir>"

for path, subdirs, files in os.walk(root):
    for filename in files:
        if filename.endswith('.json'):
            # os.walk yields bare file names; join with the directory
            # (`path`) or open() would look in the CWD and fail.
            file_path = os.path.join(path, filename)
            with open(file_path, 'r') as myfile:
                data = myfile.read()
Related
I have 24 paths to JSON files. I want to load each path, open and read the file, and perform an operation on each file.
def main():
    """Walk C:\\Work\\metadata and load every *_metameta.json file found."""
    import json
    import os

    path = r'C:\Work\metadata'
    # The original also did `m = os.listdir(path)` -- the result was never
    # used, and the call raises before the walk if `path` is missing.
    for root, dirs, files in os.walk(path, topdown=True):
        for f in files:
            x = os.path.join(root, f)
            # Suffix match works on the joined path as well as the bare name.
            if x.endswith("_metameta.json"):
                with open(x, 'r') as fp:
                    data = json.load(fp)
I am new to JSON and to Python as well; please help me find the right way and correct me.
thank you
I want to open and read several text files. The plan is to find a string in the text files and print the whole line containing the string. The thing is, I can't open the paths from the array.
I hope it is understandable what I am trying to do.
import os
from os import listdir
from os.path import join
from config import cred

path = r"E:\Utorrent\Leaked_txt"

# Build the list of full paths ONCE; the original rebuilt a one-element
# list on every loop iteration, so all but the last file were lost.
file_paths = [os.path.join(path, filename) for filename in os.listdir(path)]
print(file_paths)

# Iterate the paths directly -- no index arithmetic.  (The original's
# `str(list[i], "r")` passed "r" to str(), not to open(), and
# `range(len(list)-1)` skipped the list's only element.)
for file_path in file_paths:
    with open(file_path, "r") as f:
        for line in f:
            if cred in line:
                print(line)
Thanks :D
I prefer to use glob when reading several files in a directory
import glob

files = glob.glob(r"E:\Utorrent\Leaked_txt\*.txt")  # every .txt file in the folder

for file in files:  # iterate over files
    with open(file, 'r') as f:
        # Iterate the file object itself: that yields whole lines.
        # (Iterating f.read() walks the text character by character.)
        for line in f:
            if cred in line:  # if some string is in line
                print(line)  # print the line
With os, you can do something like this:
import os
from config import cred

path = "E:/Utorrent/Leaked_txt"

# Full paths of every .txt file directly inside `path`.
files = []
for entry in os.listdir(path):
    if entry.endswith(".txt"):
        files.append(os.path.join(path, entry))

# Scan each file line by line for the credential string.
for file in files:
    with open(file, "r") as f:
        for line in f.readlines():
            if cred in line:
                print(line)
Edit
os.listdir only includes files from the parent directory (specified by path). To get the .txt files from all sub-directories, use the following:
# Collect .txt files from `path` and every sub-directory beneath it.
files = []
for dirpath, _, names in os.walk(path):
    for name in names:
        if name.endswith(".txt"):
            files.append(os.path.join(dirpath, name))
I am trying to find a string that is contained in files under a directory, then store the matching file names and directories in a new text file or something similar.
I have gotten to where it goes through a directory, finds a string, and prints a result — but I am not sure of the next step.
Please help, I'm completely new to coding and python.
import glob, os

# Open a source as a file and assign it as source.
with open('target.txt') as src:  # with-block closes the handle
    source = src.read()

filedirectories = []

# Locate files containing the source text and print the directories.
os.chdir("/Users/a1003584/desktop")
for root, dirs, files in os.walk(".", topdown=True):
    for name in files:
        full_path = os.path.join(root, name)
        print(full_path)
        with open(full_path) as candidate:
            if source in candidate.read():
                # print() call -- the original Python 2 `print 'treasure
                # found.'` statement is a SyntaxError on Python 3.
                print('treasure found.')
Don't do a string comparison if you're looking for a dictionary. Instead use the json module, like this.
import json
import os

filesFound = []


def searchDir(dirName):
    """Recursively collect JSON files under dirName that contain the key."""
    for name in os.listdir(dirName):
        # Join with os.path.join instead of string concatenation, so it
        # works whether or not dirName ends with a separator.
        full_path = os.path.join(dirName, name)
        # If it is a file.  (os.isfile does not exist; it is os.path.isfile.)
        if os.path.isfile(full_path):
            try:
                # json.load takes an open file object, not a path string.
                with open(full_path) as fp:
                    fileCon = json.load(fp)
            except (OSError, ValueError):
                print("None json file.")
                continue  # skip; otherwise fileCon below would be unbound
            if "KeySearchedFor" in fileCon:
                filesFound.append(full_path)
        # If it is a directory.
        else:
            searchDir(full_path)


# Change this to the directory you're looking in.
# expanduser is required: open()/listdir() do not expand '~' themselves.
searchDir(os.path.expanduser("~/Desktop"))
with open(os.path.expanduser("~/Desktop/OutFile.txt"), 'w') as out:
    # write() needs a string; the original passed the list itself.
    out.write("\n".join(filesFound))
This should write the output to a csv file
import csv
import os

with open('target.txt') as infile:
    source = infile.read()

# newline='' is the documented way to open a csv output file; without it
# the writer emits blank rows on Windows.
with open("output.csv", 'w', newline='') as fout:
    outfile = csv.writer(fout)
    outfile.writerow("Directory FileName FilePath".split())
    for root, dirnames, fnames in os.walk("/Users/a1003584/desktop", topdown=True):
        for fname in fnames:
            with open(os.path.join(root, fname)) as infile:
                if source not in infile.read():
                    continue
            # writerow takes ONE sequence argument, not three positionals.
            outfile.writerow([root, fname, os.path.join(root, fname)])
I am new to python. I am trying to read multiple files one by one from subfolders, do some processing and output. Below is my code:
import os

rootdir = 'dir'

for subdir, dirs, files in os.walk(rootdir):
    for fname in files:
        # os.walk yields bare names; join with subdir, or open() looks in
        # the CWD and raises the reported IOError for 'test.txt'.
        file_path = os.path.join(subdir, fname)
        print(file_path)  # print() call: Python 3 compatible
        with open(file_path, 'r') as f:
            lines = f.readlines()
        with open(file_path, 'w') as f:
            for line in lines:
                # NOTE(review): the original did `f['X1'] = f['X1'].astype(str)`,
                # which is pandas DataFrame syntax and raises on a file object;
                # writing the line back is the closest working intent -- confirm.
                f.write(line)
But I am having the following error:
IOError: [Errno2] No such file or directory : 'test.txt'
Your problem is that you are doing this:
os.path.join(subdir, fname)
But you are not assigning it to a variable, so when you do this:
f=open(fname,'r')
You are still only using the filename.
You should do this:
file_path = os.path.join(subdir, fname)
So now you actually have the file path. Then this:
f=open(file_path,'r')
I'm trying to parse all files in a folder and write the filenames in a CSV using Python. The code I used is
import os, csv
f=open("C:/Users/Amber/weights.csv",'r+')
w=csv.writer(f)
for path, dirs, files in os.walk("C:/Users/Amber/Creator"):
for filename in files:
w.writerow(filename)
The result I'm getting in the CSV has individual letters in one column rather than the entire file name. How can I fix that?
import os, csv

# 'w' (not 'r+') creates the file if missing and truncates stale contents;
# newline='' stops the csv module from emitting blank rows on Windows.
with open("C:/Users/Amber/weights.csv", 'w', newline='') as f:
    w = csv.writer(f)
    for path, dirs, files in os.walk("C:/Users/Amber/Creator"):
        for filename in files:
            # writerow expects a sequence; a bare string is split into
            # its characters, one per column.
            w.writerow([filename])
writerow() expects a sequence argument:
import os, csv

# newline='' is required for csv files, else blank rows appear on Windows.
with open("C:/Users/Amber/weights.csv", 'w', newline='') as f:
    writer = csv.writer(f)
    for path, dirs, files in os.walk("C:/Users/Amber/Creator"):
        for filename in files:
            writer.writerow([filename])  # a sequence: one cell per element
import csv
import glob

# newline='' prevents blank rows on Windows.
with open('csvinput.csv', 'w', newline='') as f:
    writer = csv.writer(f)
    names = glob.glob('filepath/*.png')
    # zip(names) wraps each name in a 1-tuple, so each name lands in its
    # own row -- i.e. the results form a single column.
    writer.writerows(zip(names))
import os

if __name__ == "__main__":
    folderpath = 'C:\\Users\\kppra\\Desktop\\Data'
    # The original `open('output.csv", 'w')` mixed quote characters,
    # which is a SyntaxError; `with` also guarantees the close.
    with open('output.csv', 'w') as datapath:
        for (root, dirs, files) in os.walk(folderpath, topdown=True):
            for f in files:
                datapath.write(f)
                datapath.write('\n')