How to loop and optimise data extraction - Python [duplicate]

I need to iterate through all .asm files inside a given directory and do some actions on them.
How can this be done in an efficient way?

Python 3.6 version of the above answer, using os - assuming that you have the directory path as a str object in a variable called directory_in_str:
import os

directory = os.fsencode(directory_in_str)

for file in os.listdir(directory):
    filename = os.fsdecode(file)
    if filename.endswith(".asm") or filename.endswith(".py"):
        # print(os.path.join(directory, filename))
        continue
    else:
        continue
Or recursively, using pathlib:
from pathlib import Path

pathlist = Path(directory_in_str).glob('**/*.asm')
for path in pathlist:
    # path is a Path object, not a string
    path_in_str = str(path)
    # print(path_in_str)
Use rglob instead: replace glob('**/*.asm') with rglob('*.asm').
This is like calling Path.glob() with '**/' added in front of the given relative pattern:
from pathlib import Path

pathlist = Path(directory_in_str).rglob('*.asm')
for path in pathlist:
    # path is a Path object, not a string
    path_in_str = str(path)
    # print(path_in_str)
Original answer:
import os

for filename in os.listdir("/path/to/dir/"):
    if filename.endswith(".asm") or filename.endswith(".py"):
        # print(os.path.join(directory, filename))
        continue
    else:
        continue

This will iterate over all descendant files, not just the immediate children of the directory:
import os

for subdir, dirs, files in os.walk(rootdir):
    for file in files:
        # print(os.path.join(subdir, file))
        filepath = subdir + os.sep + file
        if filepath.endswith(".asm"):
            print(filepath)

You can try using the glob module:
import glob

for filepath in glob.iglob('my_dir/*.asm'):
    print(filepath)
and since Python 3.5 you can search subdirectories as well:
glob.glob('**/*.txt', recursive=True) # => ['2.txt', 'sub/3.txt']
From the docs:
The glob module finds all the pathnames matching a specified pattern according to the rules used by the Unix shell, although results are returned in arbitrary order. No tilde expansion is done, but *, ?, and character ranges expressed with [] will be correctly matched.
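For instance, a minimal sketch of those wildcard rules (the file names here are hypothetical):
import glob

# '[]' matches a character range: frame_0.asm through frame_9.asm, but not frame_x.asm
for filepath in glob.iglob('my_dir/frame_[0-9].asm'):
    print(filepath)

# '?' matches exactly one character: a.asm or b.asm, but not ab.asm
print(glob.glob('my_dir/?.asm'))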

Since Python 3.5, things are much easier with os.scandir(), and 2-20x faster:
import os

with os.scandir(path) as it:
    for entry in it:
        if entry.name.endswith(".asm") and entry.is_file():
            print(entry.name, entry.path)
Using scandir() instead of listdir() can significantly increase the
performance of code that also needs file type or file attribute
information, because os.DirEntry objects expose this information if
the operating system provides it when scanning a directory. All
os.DirEntry methods may perform a system call, but is_dir() and
is_file() usually only require a system call for symbolic links;
os.DirEntry.stat() always requires a system call on Unix but only
requires one for symbolic links on Windows.
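As a small sketch of that point, the file size can be collected during the same scan, with no extra listdir() plus stat() round trip per file (path is assumed to hold the directory, as above):
import os

with os.scandir(path) as it:
    for entry in it:
        if entry.is_file() and entry.name.endswith(".asm"):
            # stat() here may be served from data cached during the scan (notably on Windows)
            print(entry.name, entry.stat().st_size)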

Python 3.4 and later offer pathlib in the standard library. You could do:
from pathlib import Path

asm_pths = [pth for pth in Path.cwd().iterdir()
            if pth.suffix == '.asm']
Or if you don't like list comprehensions:
asm_pths = []
for pth in Path.cwd().iterdir():
    if pth.suffix == '.asm':
        asm_pths.append(pth)
Path objects can easily be converted to strings.
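For example (a minimal sketch; the file name is hypothetical):
import os
from pathlib import Path

pth = Path.cwd() / 'program.asm'
as_string = str(pth)         # plain string form of the path
same_thing = os.fspath(pth)  # os.fspath() also accepts Path objects (Python 3.6+)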

Here's how I iterate through files in Python:
import os

path = 'the/name/of/your/path'
folder = os.fsencode(path)
filenames = []

for file in os.listdir(folder):
    filename = os.fsdecode(file)
    if filename.endswith(('.jpeg', '.png', '.gif')):  # whatever file types you're using...
        filenames.append(filename)

filenames.sort()  # now you have the filenames and can do something with them
NONE OF THESE TECHNIQUES GUARANTEE ANY ITERATION ORDERING
Yup, super unpredictable. Notice that I sort the filenames, which is important if the order of the files matters, e.g. for video frames or time-dependent data collection. Be sure to put indices in your filenames, though!
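To illustrate why the indices matter, here is a small sketch with hypothetical frame names: a plain lexicographic sort puts frame10 before frame2 unless the indices are zero-padded (or you sort on the extracted number):
import re

unpadded = ['frame1.png', 'frame10.png', 'frame2.png']
print(sorted(unpadded))  # ['frame1.png', 'frame10.png', 'frame2.png'] -- not numeric order

# workaround: sort on the integer index embedded in each name
print(sorted(unpadded, key=lambda name: int(re.search(r'\d+', name).group())))
# ['frame1.png', 'frame2.png', 'frame10.png']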

You can use glob to refer to the directory and list its contents:
import glob
import os

# to get the current working directory name
cwd = os.getcwd()

# load the images from the images folder
for f in glob.glob('images/*.jpg'):
    dir_name = get_dir_name(f)  # get_dir_name is a user-defined helper
    image_file_name = dir_name + '.jpg'
    # to print the file name with path (the path will be a string)
    print(image_file_name)
To get the list of all directories as an array, you can use os:
os.listdir(directory)

I'm not quite happy with this implementation yet, I wanted to have a custom constructor that does DirectoryIndex._make(next(os.walk(input_path))) such that you can just pass the path you want a file listing for. Edits welcome!
import collections
import os

DirectoryIndex = collections.namedtuple('DirectoryIndex', ['root', 'dirs', 'files'])

index = DirectoryIndex(*next(os.walk('.')))
for file_name in index.files:
    file_path = os.path.join(index.root, file_name)
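One possible edit along those lines, sketched as a subclass with a from_path constructor (PathIndex and from_path are names invented here):
import collections
import os

DirectoryIndex = collections.namedtuple('DirectoryIndex', ['root', 'dirs', 'files'])

class PathIndex(DirectoryIndex):
    @classmethod
    def from_path(cls, input_path):
        # next(os.walk(...)) yields the (root, dirs, files) triple for the top level only
        return cls._make(next(os.walk(input_path)))

index = PathIndex.from_path('.')
for file_name in index.files:
    file_path = os.path.join(index.root, file_name)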

I really like using the scandir function that is built into the os module. Here is a working example:
import os

i = 0
with os.scandir('/usr/local/bin') as root_dir:
    for path in root_dir:
        if path.is_file():
            i += 1
            print(f"Full path is: {path.path} and just the name is: {path.name}")
print(f"{i} files scanned successfully.")

Get all the .asm files in a directory by doing this.
import os

path = "path_to_file"
file_type = '.asm'

for filename in os.listdir(path=path):
    if filename.endswith(file_type):
        print(filename)
        print(f"{path}/{filename}")
        # do something below

I don't understand why some answers are complicated. This is how I would do it with Python 2.7. Replace DIRECTORY_TO_LOOP with the directory you want to use.
import os

DIRECTORY_TO_LOOP = '/var/www/files/'

for root, dirs, files in os.walk(DIRECTORY_TO_LOOP, topdown=False):
    for name in files:
        print(os.path.join(root, name))

Related

Find and copy files from directories using wildcard [duplicate]

This is what I have:
glob(os.path.join('src','*.c'))
but I want to search the subfolders of src. Something like this would work:
glob(os.path.join('src','*.c'))
glob(os.path.join('src','*','*.c'))
glob(os.path.join('src','*','*','*.c'))
glob(os.path.join('src','*','*','*','*.c'))
But this is obviously limited and clunky.
pathlib.Path.rglob
Use pathlib.Path.rglob from the pathlib module, which was introduced in Python 3.5.
from pathlib import Path

for path in Path('src').rglob('*.c'):
    print(path.name)
If you don't want to use pathlib, you can use glob.glob('**/*.c'), but don't forget to pass recursive=True, and note that it can take an inordinate amount of time on large directories.
For cases where you need to match files beginning with a dot (.), such as hidden files on Unix-based systems, use the os.walk solution below; the sketch that follows shows the difference.
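A small demonstration of that caveat (the demo directory is created here purely for illustration):
import glob
import os

os.makedirs('demo/.hidden', exist_ok=True)
open('demo/.hidden/a.c', 'w').close()

print(glob.glob('demo/**/*.c', recursive=True))  # [] -- dot-directories are skipped
print([os.path.join(root, f)
       for root, dirs, files in os.walk('demo')
       for f in files if f.endswith('.c')])      # ['demo/.hidden/a.c']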
os.walk
For older Python versions, use os.walk to recursively walk a directory and fnmatch.filter to match against a simple expression:
import fnmatch
import os

matches = []
for root, dirnames, filenames in os.walk('src'):
    for filename in fnmatch.filter(filenames, '*.c'):
        matches.append(os.path.join(root, filename))
For Python >= 3.5 you can use ** with recursive=True:
import glob

for f in glob.glob('/path/**/*.c', recursive=True):
    print(f)
If recursive is True (default is False), the pattern ** will match any files and zero
or more directories and subdirectories. If the pattern is followed by
an os.sep, only directories and subdirectories match.
Similar to other solutions, but using fnmatch.fnmatch instead of glob, since os.walk already listed the filenames:
import os, fnmatch

def find_files(directory, pattern):
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                filename = os.path.join(root, basename)
                yield filename

for filename in find_files('src', '*.c'):
    print('Found C source:', filename)
Also, using a generator allows you to process each file as it is found, instead of finding all the files first and then processing them.
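For example, since find_files is a generator, you can stop after the first few matches without scanning the rest of the tree (a small sketch):
from itertools import islice

# process only the first five matches, then stop walking
for filename in islice(find_files('src', '*.c'), 5):
    print('Found C source:', filename)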
I've modified the glob module to support ** for recursive globbing, e.g.:
>>> import glob2
>>> all_header_files = glob2.glob('src/**/*.c')
https://github.com/miracle2k/python-glob2/
Useful when you want to provide your users with the ability to use the ** syntax, and thus os.walk() alone is not good enough.
Starting with Python 3.4, one can use the glob() method of one of the Path classes in the new pathlib module, which supports ** wildcards. For example:
from pathlib import Path

for file_path in Path('src').glob('**/*.c'):
    print(file_path)  # do whatever you need with these files
Update:
Starting with Python 3.5, the same syntax is also supported by glob.glob().
import os
import fnmatch

def recursive_glob(treeroot, pattern):
    results = []
    for base, dirs, files in os.walk(treeroot):
        goodfiles = fnmatch.filter(files, pattern)
        results.extend(os.path.join(base, f) for f in goodfiles)
    return results
fnmatch gives you exactly the same patterns as glob, so this is really an excellent replacement for glob.glob with very close semantics. An iterative version (e.g. a generator), IOW a replacement for glob.iglob, is a trivial adaptation (just yield the intermediate results as you go, instead of extending a single results list to return at the end).
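A sketch of that generator adaptation (recursive_iglob is a name invented here):
import os
import fnmatch

def recursive_iglob(treeroot, pattern):
    for base, dirs, files in os.walk(treeroot):
        for f in fnmatch.filter(files, pattern):
            yield os.path.join(base, f)  # yield each match as the walk proceeds

for filename in recursive_iglob('src', '*.c'):
    print(filename)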
You'll want to use os.walk to collect filenames that match your criteria. For example:
import os

cfiles = []
for root, dirs, files in os.walk('src'):
    for file in files:
        if file.endswith('.c'):
            cfiles.append(os.path.join(root, file))
Here's a solution with nested list comprehensions, os.walk and simple suffix matching instead of glob:
import os

cfiles = [os.path.join(root, filename)
          for root, dirnames, filenames in os.walk('src')
          for filename in filenames if filename.endswith('.c')]
It can be compressed to a one-liner:
import os;cfiles=[os.path.join(r,f) for r,d,fs in os.walk('src') for f in fs if f.endswith('.c')]
or generalized as a function:
import os

def recursive_glob(rootdir='.', suffix=''):
    return [os.path.join(looproot, filename)
            for looproot, _, filenames in os.walk(rootdir)
            for filename in filenames if filename.endswith(suffix)]

cfiles = recursive_glob('src', '.c')
If you do need full glob style patterns, you can follow Alex's and
Bruno's example and use fnmatch:
import fnmatch
import os

def recursive_glob(rootdir='.', pattern='*'):
    return [os.path.join(looproot, filename)
            for looproot, _, filenames in os.walk(rootdir)
            for filename in filenames
            if fnmatch.fnmatch(filename, pattern)]

cfiles = recursive_glob('src', '*.c')
Consider pathlib's Path.rglob().
This is like calling Path.glob() with "**/" added in front of the given relative pattern:
import pathlib

for p in pathlib.Path("src").rglob("*.c"):
    print(p)
import os, glob

for each in glob.glob('path/**/*.c', recursive=True):
    print(f'Name with path: {each} \nName without path: {os.path.basename(each)}')
glob.glob('*.c') : matches all files ending in .c in the current directory
glob.glob('*/*.c') : matches all files ending in .c in the immediate subdirectories only, but not in the current directory
glob.glob('**/*.c') : same as 2; without recursive=True the pattern ** behaves like *
glob.glob('*.c', recursive=True) : same as 1 (the pattern contains no **)
glob.glob('*/*.c', recursive=True) : same as 2 (the pattern contains no **)
glob.glob('**/*.c', recursive=True) : matches all files ending in .c in the current directory and in all subdirectories
In case this may interest anyone, I've profiled the top three proposed methods.
I have ~500K files in the globbed folder (in total), and 2K files that match the desired pattern.
Here's the (very basic) code:
import glob
import fnmatch
import os
from pathlib import Path
from time import time

def find_files_iglob():
    return glob.iglob("./data/**/data.json", recursive=True)

def find_files_oswalk():
    for root, dirnames, filenames in os.walk('data'):
        for filename in fnmatch.filter(filenames, 'data.json'):
            yield os.path.join(root, filename)

def find_files_rglob():
    return Path('data').rglob('data.json')

t0 = time()
for f in find_files_oswalk(): pass
t1 = time()
for f in find_files_rglob(): pass
t2 = time()
for f in find_files_iglob(): pass
t3 = time()
print(t1-t0, t2-t1, t3-t2)
And the results I got were:
os_walk: ~3.6 sec
rglob: ~14.5 sec
iglob: ~16.9 sec
The platform: Ubuntu 16.04, x86_64 (Core i7).
Recently I had to recover my pictures with the extension .jpg. I ran photorec and recovered 4579 directories with 2.2 million files inside, having a tremendous variety of extensions. With the script below I was able to select the 50133 files with the .jpg extension within minutes:
#!/usr/bin/env python2.7
import glob
import shutil
import os

src_dir = "/home/mustafa/Masaüstü/yedek"
dst_dir = "/home/mustafa/Genel/media"

for mediafile in glob.iglob(os.path.join(src_dir, "*", "*.jpg")):  # "*" is for the subdirectory
    shutil.copy(mediafile, dst_dir)
Based on other answers, this is my current working implementation, which retrieves nested XML files in a root directory:
import glob
import os

files = []
for root, dirnames, filenames in os.walk(myDir):
    files.extend(glob.glob(root + "/*.xml"))
I'm really having fun with python :)
For Python 3.5 and later:
import glob

# file_names_array = glob.glob('path/*.c', recursive=True)
# the line above works only for files directly under path/, as guided by NeStack
# updated version:
file_names_array = glob.glob('path/**/*.c', recursive=True)
Further, you might need:
for full_path_in_src in file_names_array:
    print(full_path_in_src)  # will be like 'abc/xyz.c'
    # the full system path would be like 'path till src/abc/xyz.c'
Johan and Bruno provide excellent solutions on the minimal requirement as stated. I have just released Formic which implements Ant FileSet and Globs which can handle this and more complicated scenarios. An implementation of your requirement is:
import formic

fileset = formic.FileSet(include="/src/**/*.c")
for file_name in fileset.qualified_files():
    print file_name
Another way to do it using just the glob module. Just seed the rglob function with a starting base directory and a pattern to match, and it will return a list of matching file names.
import glob
import os

def _getDirs(base):
    return [x for x in glob.iglob(os.path.join(base, '*')) if os.path.isdir(x)]

def rglob(base, pattern):
    results = []
    results.extend(glob.glob(os.path.join(base, pattern)))
    dirs = _getDirs(base)
    if len(dirs):
        for d in dirs:
            results.extend(rglob(d, pattern))  # d is already joined with base
    return results
Or with a list comprehension:
>>> base = r"c:\User\xtofl"
>>> binfiles = [os.path.join(root, f)
                for root, _, files in os.walk(base)
                for f in files if f.endswith(".jpg")]
If the files are on a remote file system or inside an archive, you can use an implementation of the fsspec AbstractFileSystem class. For example, to list all the files in a zipfile:
from fsspec.implementations.zip import ZipFileSystem
fs = ZipFileSystem("/tmp/test.zip")
fs.glob("/**") # equivalent: fs.find("/")
or to list all the files in a publicly available S3 bucket:
from s3fs import S3FileSystem
fs_s3 = S3FileSystem(anon=True)
fs_s3.glob("noaa-goes16/ABI-L1b-RadF/2020/045/**") # or use fs_s3.find
You can also use it for a local filesystem, which may be interesting if your implementation should be filesystem-agnostic:
from fsspec.implementations.local import LocalFileSystem
fs = LocalFileSystem()
fs.glob("/tmp/test/**")
Other implementations include Google Cloud, GitHub, SFTP/SSH, Dropbox, and Azure. For details, see the fsspec API documentation.
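Since all of these expose the same AbstractFileSystem interface, a matched path can then be opened through the same object; a minimal sketch with the local filesystem (the directory is hypothetical):
from fsspec.implementations.local import LocalFileSystem

fs = LocalFileSystem()
for match in fs.glob("/tmp/test/**"):
    if fs.isfile(match):
        with fs.open(match, "rb") as f:  # the same call works for zip, S3, etc.
            head = f.read(16)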
Just made this: it will print files and directories in a hierarchical way. But I didn't use fnmatch or walk:
#!/usr/bin/python
import os, glob, sys

def dirlist(path, c=1):
    for i in glob.glob(os.path.join(path, "*")):
        if os.path.isfile(i):
            filepath, filename = os.path.split(i)
            print('----' * c + filename)
        elif os.path.isdir(i):
            dirname = os.path.basename(i)
            print('----' * c + dirname)
            c += 1
            dirlist(i, c)
            c -= 1

path = os.path.normpath(sys.argv[1])
print(os.path.basename(path))
dirlist(path)
This one accepts either an fnmatch pattern or a compiled regular expression:
import fnmatch, os

def filepaths(directory, pattern):
    for root, dirs, files in os.walk(directory):
        for basename in files:
            try:
                matched = pattern.match(basename)
            except AttributeError:
                matched = fnmatch.fnmatch(basename, pattern)
            if matched:
                yield os.path.join(root, basename)

# usage
if __name__ == '__main__':
    from pprint import pprint as pp
    import re

    path = r'/Users/hipertracker/app/myapp'
    pp([x for x in filepaths(path, re.compile(r'.*\.py$'))])
    pp([x for x in filepaths(path, '*.py')])
In addition to the suggested answers, you can do this with some lazy generation and list comprehension magic:
import os, glob, itertools

results = itertools.chain.from_iterable(glob.iglob(os.path.join(root, '*.c'))
                                        for root, dirs, files in os.walk('src'))

for f in results:
    print(f)
Besides fitting in one line and avoiding unnecessary lists in memory, this also has the nice side effect that you can use it in a way similar to the ** operator; e.g., you could use os.path.join(root, 'some/path/*.c') in order to get all .c files in all subdirectories of src that have this structure.
This is working code for Python 2.7. As part of my devops work, I was required to write a script that would move the config files marked with live-appName.properties to appName.properties. There could be files with other extensions as well, like live-appName.xml.
Below is working code for this, which finds the files in the given directories (at nested levels) and then renames (moves) them to the required filename:
import fnmatch
import os
import shutil

def flipProperties(searchDir):
    print "Flipping properties to point to live DB"
    for root, dirnames, filenames in os.walk(searchDir):
        for filename in fnmatch.filter(filenames, 'live-*.*'):
            targetFileName = os.path.join(root, filename.split("live-")[1])
            print "File " + os.path.join(root, filename) + " will be moved to " + targetFileName
            shutil.move(os.path.join(root, filename), targetFileName)
This function is called from a main script
flipProperties(searchDir)
Hope this helps someone struggling with similar issues.
Simplified version of Johan Dahlin's answer, without fnmatch.
import os

matches = []
for root, dirnames, filenames in os.walk('src'):
    matches += [os.path.join(root, f) for f in filenames if f[-2:] == '.c']
Here is my solution, using a list comprehension to search for multiple file extensions recursively in a directory and all subdirectories:
import os, glob

def _globrec(path, *exts):
    """ Glob recursively a directory and all subdirectories for multiple file extensions
        Note: Glob is case-insensitive on Windows, i.e. for '\*.jpg' you will get files
        ending with .jpg and .JPG

        Parameters
        ----------
        path : str
            A directory name
        exts : tuple
            File extensions to glob for

        Returns
        -------
        files : list
            list of files matching extensions in exts in path and subfolders
    """
    dirs = [a[0] for a in os.walk(path)]
    f_filter = [d + e for d in dirs for e in exts]
    return [f for files in [glob.iglob(files) for files in f_filter] for f in files]

my_pictures = _globrec(r'C:\Temp', r'\*.jpg', r'\*.bmp', r'\*.png', r'\*.gif')
for f in my_pictures:
    print(f)
import sys, os, glob

dir_list = ["c:\\books\\heap"]
while len(dir_list) > 0:
    cur_dir = dir_list[0]
    del dir_list[0]
    list_of_files = glob.glob(cur_dir + '\\*')
    for book in list_of_files:
        if os.path.isfile(book):
            print(book)
        else:
            dir_list.append(book)
I modified the top answer in this posting and recently created this script, which will loop through all files in a given directory (searchdir) and the sub-directories under it, and print filename, rootdir, modified/creation date, and size.
Hope this helps someone walk a directory and get file info.
import time
import os

def fileinfo(file):
    filename = os.path.basename(file)
    rootdir = os.path.dirname(file)
    lastmod = time.ctime(os.path.getmtime(file))
    creation = time.ctime(os.path.getctime(file))
    filesize = os.path.getsize(file)
    print("%s**\t%s\t%s\t%s\t%s" % (rootdir, filename, lastmod, creation, filesize))

searchdir = r'D:\Your\Directory\Root'
for root, dirnames, filenames in os.walk(searchdir):
    # to filter by pattern instead, use: for filename in fnmatch.filter(filenames, '*.c'):
    for filename in filenames:
        fileinfo(os.path.join(root, filename))
Here is a solution that will match the pattern against the full path and not just the base filename.
It uses fnmatch.translate to convert a glob-style pattern into a regular expression, which is then matched against the full path of each file found while walking the directory.
re.IGNORECASE is optional, but desirable on Windows since the file system itself is not case-sensitive. (I didn't bother compiling the regex because docs indicate it should be cached internally.)
import fnmatch
import os
import re

def findfiles(dir, pattern):
    patternregex = fnmatch.translate(pattern)
    for root, dirs, files in os.walk(dir):
        for basename in files:
            filename = os.path.join(root, basename)
            if re.search(patternregex, filename, re.IGNORECASE):
                yield filename
I needed a solution for Python 2.x that works fast on large directories.
I ended up with this:
import subprocess

foundfiles = subprocess.check_output("ls src/*.c src/**/*.c", shell=True)
for foundfile in foundfiles.splitlines():
    print foundfile
Note that you might need some exception handling in case ls doesn't find any matching file.
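A minimal sketch of that exception handling (written for Python 3, where check_output returns bytes and a non-zero exit from ls raises CalledProcessError):
import subprocess

try:
    foundfiles = subprocess.check_output("ls src/*.c src/**/*.c", shell=True)
except subprocess.CalledProcessError:
    foundfiles = b""  # ls exits with a non-zero status when nothing matches

for foundfile in foundfiles.splitlines():
    print(foundfile.decode())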

How can I check if there are any more JSON files remaining in a directory in Python? [duplicate]


How can I iterate over files in a given directory?


Find all CSV files in a directory using Python

How can I find all files in a directory with the extension .csv in Python?
import os
import glob

path = 'c:\\'
extension = 'csv'
os.chdir(path)
result = glob.glob('*.{}'.format(extension))
print(result)
from os import listdir

def find_csv_filenames(path_to_dir, suffix=".csv"):
    filenames = listdir(path_to_dir)
    return [filename for filename in filenames if filename.endswith(suffix)]
The function find_csv_filenames() returns a list of filenames as strings, that reside in the directory path_to_dir with the given suffix (by default, ".csv").
Addendum
How to print the filenames:
filenames = find_csv_filenames("my/directory")
for name in filenames:
    print(name)
By using the combination of filter and a lambda, you can easily filter out the CSV files in a given folder:
import os

all_files = os.listdir("/path-to-dir")
csv_files = list(filter(lambda f: f.endswith('.csv'), all_files))
# the lambda returns True if a filename (within `all_files`) ends with .csv, else False,
# and filter uses the returned booleans to keep only the .csv files from the list
Use the Python os module to find CSV files in a directory.
A simple example is here:
import os

# this is the path where you want to search
path = r'd:'
# this is the extension you want to detect
extension = '.csv'

for root, dirs_list, files_list in os.walk(path):
    for file_name in files_list:
        if os.path.splitext(file_name)[-1] == extension:
            file_name_path = os.path.join(root, file_name)
            print(file_name)
            print(file_name_path)  # this is the full path of the filtered file
I had to get CSV files that were in subdirectories, therefore, using the response from tchlpr, I modified it to work best for my use case:
import os
import glob

os.chdir('/path/to/main/dir')
result = glob.glob('*/**.csv')
print(result)
import os

path = 'C:/Users/Shashank/Desktop/'
os.chdir(path)
for p, n, f in os.walk(os.getcwd()):
    for a in f:
        a = str(a)
        if a.endswith('.csv'):
            print(a)
            print(p)
This also prints the path of each CSV file found.
While the solution given by thclpr works, it scans only the immediate files in the directory and not files in the subdirectories, if any. Although this is not the requirement, just in case someone wishes to scan subdirectories too, below is code that uses os.walk:
import os
from glob import glob

PATH = "/home/someuser/projects/someproject"
EXT = "*.csv"
all_csv_files = [file
                 for path, subdir, files in os.walk(PATH)
                 for file in glob(os.path.join(path, EXT))]
print(all_csv_files)
Use the Python glob module to easily list out the files we need:
import glob

path_csv = glob.glob("../data/subfolder/*.csv")
This solution uses the Python function filter. This function creates a list of elements for which a function returns True. In this case, the anonymous function matches '.csv' against every element of the directory file list obtained with os.listdir(filepath):
import os

filepath = 'filepath_to_my_CSVs'  # for example: './my_data/'
csv_files = list(filter(lambda x: '.csv' in x, os.listdir(filepath)))
Many (linked) answers change working directory with os.chdir(). But you don't have to.
Recursively print all CSV files in /home/project/ directory:
import glob

pathname = "/home/project/**/*.csv"
for file in glob.iglob(pathname, recursive=True):
    print(file)
Requires Python 3.5+. From the docs [1]:
pathname can be either absolute (like /usr/src/Python-1.5/Makefile) or relative (like ../../Tools/*/*.gif)
pathname can contain shell-style wildcards.
Whether or not the results are sorted depends on the file system.
If recursive is true, the pattern ** will match any files and zero or more directories, subdirectories and symbolic links to directories
[1] https://docs.python.org/3/library/glob.html#glob.glob
You could just use glob with recursive=True; the pattern ** will match any files and zero or more directories, subdirectories and symbolic links to directories.
import glob, os

os.chdir("C:\\Users\\username\\Desktop\\MAIN_DIRECTORY")
for file in glob.glob("**/*.csv", recursive=True):
    print(file)
Please use this tested working code. This function will return a list of all the CSV files with absolute CSV file paths in your specified path.
import os
from glob import glob

def get_csv_files(dir_path, ext):
    os.chdir(dir_path)
    return list(map(lambda x: os.path.join(dir_path, x), glob(f'*.{ext}')))

print(get_csv_files("E:\\input\\dir\\path", "csv"))

How to use glob() to find files recursively?

