Unable to read files from a directory - Python

I have the following directory structure:
F:\TestData
TestData contains 20 folders named node1, node2, ..., node20, and each node folder contains a file named log.10.X.
I need to access each log file from all the node folders. I wrote the code below, but it fails with: File not found - log.*
CODE:
import subprocess

directory = "F:\TestData"
p = subprocess.Popen(["find", "./" + directory, "-name", "log.*"], stdout=subprocess.PIPE)
output, err = p.communicate()
foutput = output.split("\n")

Python, unlike POSIX shells, does not automatically do globbing (interpreting * and the like as wildcards related to files in the relevant directory) in strings. It does, however, provide a glob module for that purpose. You can use this to get a list of matching filenames:
import glob
filenames = glob.glob(r'F:\TestData\node*\log.*')
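Since the goal is to read each log file, here is a minimal sketch building on that glob call (assuming the F:\TestData layout described in the question):
import glob

for filename in glob.glob(r'F:\TestData\node*\log.*'):
    with open(filename) as f:   # read each matched log file
        contents = f.read()
    print(filename, len(contents))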

You can just use Python to get a list of files in the directory:
import os
directory = r"F:\TestData"
file_list = os.listdir(directory)
log_list = list(filter(lambda x: x.startswith("log"), file_list))
Oh, you have to write code to iterate the subdirectories: first os.listdir() on the parent directory, then iterate each node subdirectory to get its files (see the sketch below).
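A minimal sketch of that two-level os.listdir approach, assuming the node*/log.* layout from the question:
import os

directory = r"F:\TestData"          # parent folder from the question
log_paths = []
for node in os.listdir(directory):  # node1, node2, ..., node20
    node_path = os.path.join(directory, node)
    if not os.path.isdir(node_path):
        continue
    for name in os.listdir(node_path):
        if name.startswith("log."):
            log_paths.append(os.path.join(node_path, name))
print(log_paths)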

Python's glob module may be an option.
import glob
directory = r'F:\TestData'
logcontents = [open(f, 'r').read() for f in glob.glob(directory + r'\node*\log.*')]

You can also use os.walk, like this:
import os

directory = r"F:\TestData"
for i in os.walk(directory):
    # i looks like this:
    # ('F:\\TestData', ['node1', 'node2', 'node3'], [])
    # ('F:\\TestData\\node1', [], ['log.1.txt'])
    # ('F:\\TestData\\node2', [], ['log.2.txt'])
    print(i)
    if i[2] != []:
        # i[2] is the list of file names in that directory; if a node
        # directory has several log files, pick the one you need with i[2][n].
        print(os.path.join(i[0], i[2][0]))

Related


Extract a part of the filepath (a directory) in Python

I need to extract the name of the parent directory of a certain path. This is what it looks like:
C:\stuff\directory_i_need\subdir\file.jpg
I would like to extract directory_i_need.
import os

## first file in current dir (with full path)
file = os.path.join(os.getcwd(), os.listdir(os.getcwd())[0])
print(file)

print(os.path.dirname(file))                   ## directory of file
print(os.path.dirname(os.path.dirname(file)))  ## directory of directory of file
...
And you can continue doing this as many times as necessary...
Edit: from os.path, you can use either os.path.split or os.path.basename:
dir = os.path.dirname(os.path.dirname(file)) ## dir of dir of file
## once you're at the directory level you want, with the desired directory as the final path node:
dirname1 = os.path.basename(dir)
dirname2 = os.path.split(dir)[1] ## if you look at the documentation, this is exactly what os.path.basename does.
For Python 3.4+, try the pathlib module:
>>> from pathlib import Path
>>> p = Path('C:\\Program Files\\Internet Explorer\\iexplore.exe')
>>> str(p.parent)
'C:\\Program Files\\Internet Explorer'
>>> p.name
'iexplore.exe'
>>> p.suffix
'.exe'
>>> p.parts
('C:\\', 'Program Files', 'Internet Explorer', 'iexplore.exe')
>>> p.relative_to('C:\\Program Files')
WindowsPath('Internet Explorer/iexplore.exe')
>>> p.exists()
True
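Applied to the path in the question, the directory name two levels up can be read straight off the Path object; PureWindowsPath is used here only so the sketch also runs on non-Windows systems:
from pathlib import PureWindowsPath

p = PureWindowsPath(r'C:\stuff\directory_i_need\subdir\file.jpg')
print(p.parents[1].name)  # directory_i_need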
All you need is the parent part if you use pathlib.
from pathlib import Path
p = Path(r'C:\Program Files\Internet Explorer\iexplore.exe')
print(p.parent)
This will output:
C:\Program Files\Internet Explorer
In case you need all the parts (already covered in other answers), use parts:
p = Path(r'C:\Program Files\Internet Explorer\iexplore.exe')
print(p.parts)
Then you will get a tuple:
('C:\\', 'Program Files', 'Internet Explorer', 'iexplore.exe')
Saves a ton of time.
First, see if you have splitunc() as an available function within os.path. The first item returned should be what you want... but I am on Linux and I do not have this function when I import os and try to use it.
Otherwise, one semi-ugly way that gets the job done is to use:
>>> pathname = "\\C:\\mystuff\\project\\file.py"
>>> pathname
'\\C:\\mystuff\\project\\file.py'
>>> print(pathname)
\C:\mystuff\project\file.py
>>> "\\".join(pathname.split('\\')[:-2])
'\\C:\\mystuff'
>>> "\\".join(pathname.split('\\')[:-1])
'\\C:\\mystuff\\project'
which shows retrieving the directory just above the file, and the directory just above that.
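Note that splitunc() has since been deprecated and removed from newer Python 3 releases; os.path.splitdrive covers both drive letters and UNC shares. A small hedged sketch (the path is made up, and ntpath is used explicitly so the Windows path rules apply on any OS):
import ntpath  # Windows flavour of os.path, importable on any OS

drive, rest = ntpath.splitdrive(r'\\host\share\mystuff\project\file.py')
print(drive)  # \\host\share
print(rest)   # \mystuff\project\file.py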
import os
directory = os.path.abspath('\\') # root directory
print(directory) # e.g. 'C:\'
directory = os.path.abspath('.') # current directory
print(directory) # e.g. 'C:\Users\User\Desktop'
parent_directory, directory_name = os.path.split(directory)
print(directory_name) # e.g. 'Desktop'
parent_parent_directory, parent_directory_name = os.path.split(parent_directory)
print(parent_directory_name) # e.g. 'User'
This should also do the trick.
This is what I did to extract the piece of the directory:
for path in file_list:
    directories = path.rsplit('\\')
    directories.reverse()
    line_replace_add_directory = line_replace + directories[2]
Thank you for your help.
You have to put the entire path as a parameter to os.path.split. See the docs. It doesn't work like str.split; see the short example below.
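A tiny example of that point, using the path from the question (ntpath, the Windows flavour of os.path, is used here only so the sketch runs on any OS):
import ntpath  # os.path on Windows

head, tail = ntpath.split(r'C:\stuff\directory_i_need\subdir\file.jpg')
print(head)  # C:\stuff\directory_i_need\subdir
print(tail)  # file.jpg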

How can I iterate over files in a given directory?

I need to iterate through all .asm files inside a given directory and do some actions on them.
How can this be done in an efficient way?
Python 3.6 version of the above answer, using os - assuming that you have the directory path as a str object in a variable called directory_in_str:
import os

directory = os.fsencode(directory_in_str)

for file in os.listdir(directory):
    filename = os.fsdecode(file)
    if filename.endswith(".asm") or filename.endswith(".py"):
        # print(os.path.join(directory, filename))
        continue
    else:
        continue
Or recursively, using pathlib:
from pathlib import Path

pathlist = Path(directory_in_str).glob('**/*.asm')
for path in pathlist:
    # because path is a Path object, not a string
    path_in_str = str(path)
    # print(path_in_str)
Use rglob: replace glob('**/*.asm') with rglob('*.asm').
This is like calling Path.glob() with '**/' added in front of the given relative pattern:
from pathlib import Path

pathlist = Path(directory_in_str).rglob('*.asm')
for path in pathlist:
    # because path is a Path object, not a string
    path_in_str = str(path)
    # print(path_in_str)
Original answer:
import os

for filename in os.listdir("/path/to/dir/"):
    if filename.endswith(".asm") or filename.endswith(".py"):
        # print(os.path.join(directory, filename))
        continue
    else:
        continue
This will iterate over all descendant files, not just the immediate children of the directory:
import os

for subdir, dirs, files in os.walk(rootdir):
    for file in files:
        # print(os.path.join(subdir, file))
        filepath = subdir + os.sep + file

        if filepath.endswith(".asm"):
            print(filepath)
You can try using the glob module:
import glob

for filepath in glob.iglob('my_dir/*.asm'):
    print(filepath)
and since Python 3.5 you can search subdirectories as well:
glob.glob('**/*.txt', recursive=True) # => ['2.txt', 'sub/3.txt']
From the docs:
The glob module finds all the pathnames matching a specified pattern according to the rules used by the Unix shell, although results are returned in arbitrary order. No tilde expansion is done, but *, ?, and character ranges expressed with [] will be correctly matched.
Since Python 3.5, things are much easier with os.scandir() and 2-20x faster (source):
with os.scandir(path) as it:
    for entry in it:
        if entry.name.endswith(".asm") and entry.is_file():
            print(entry.name, entry.path)
Using scandir() instead of listdir() can significantly increase the
performance of code that also needs file type or file attribute
information, because os.DirEntry objects expose this information if
the operating system provides it when scanning a directory. All
os.DirEntry methods may perform a system call, but is_dir() and
is_file() usually only require a system call for symbolic links;
os.DirEntry.stat() always requires a system call on Unix but only
requires one for symbolic links on Windows.
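Building on the quote above, here is a minimal recursive variant (an assumption, not part of the original answer): it keeps the cheap DirEntry type checks while walking subdirectories, and the .asm filter comes from the question.
import os

def iter_asm_files(path):
    # Recursively yield paths of .asm files, reusing each DirEntry's
    # cached type information instead of extra stat() calls.
    with os.scandir(path) as it:
        for entry in it:
            if entry.is_dir(follow_symlinks=False):
                yield from iter_asm_files(entry.path)
            elif entry.is_file() and entry.name.endswith(".asm"):
                yield entry.path

# for p in iter_asm_files("/path/to/dir"):
#     print(p)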
Python 3.4 and later offer pathlib in the standard library. You could do:
from pathlib import Path
asm_pths = [pth for pth in Path.cwd().iterdir()
            if pth.suffix == '.asm']
Or if you don't like list comprehensions:
asm_pths = []
for pth in Path.cwd().iterdir():
    if pth.suffix == '.asm':
        asm_pths.append(pth)
Path objects can easily be converted to strings.
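For example (a trivial sketch; the file name is hypothetical):
import os
from pathlib import Path

p = Path.cwd() / "program.asm"  # hypothetical file name
print(str(p))        # plain string form of the path
print(os.fspath(p))  # equivalent, works for any path-like object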
Here's how I iterate through files in Python:
import os
path = 'the/name/of/your/path'
folder = os.fsencode(path)
filenames = []
for file in os.listdir(folder):
    filename = os.fsdecode(file)
    if filename.endswith(('.jpeg', '.png', '.gif')):  # whatever file types you're using...
        filenames.append(filename)

filenames.sort()  # now you have the filenames and can do something with them
NONE OF THESE TECHNIQUES GUARANTEE ANY ITERATION ORDERING
Yup, super unpredictable. Notice that I sort the filenames, which is important if the order of the files matters, e.g. for video frames or time-dependent data collection. Be sure to put indices in your filenames though (see the sorting sketch below)!
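For the indexed-filename case, one hedged way to sort numerically rather than lexically (the frame names and regex here are made up for illustration):
import re

filenames = ["frame10.png", "frame2.png", "frame1.png"]  # hypothetical names

def frame_index(name):
    # Pull the first run of digits out of the name; fall back to 0 if none.
    m = re.search(r"\d+", name)
    return int(m.group()) if m else 0

filenames.sort(key=frame_index)
print(filenames)  # ['frame1.png', 'frame2.png', 'frame10.png']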
You can use glob to refer to the directory and list it:
import glob
import os

# to get the current working directory name
cwd = os.getcwd()

# Load the images from the images folder.
for f in glob.glob(r'images\*.jpg'):
    dir_name = get_dir_name(f)  # get_dir_name() is the author's helper, defined elsewhere
    image_file_name = dir_name + '.jpg'
    # To print the file name with path (path will be a string)
    print(image_file_name)
To get the list of all directories in an array you can use os:
os.listdir(directory)
I'm not quite happy with this implementation yet; I wanted to have a custom constructor that does DirectoryIndex._make(next(os.walk(input_path))) such that you can just pass the path you want a file listing for (one possible version is sketched below). Edits welcome!
import collections
import os

DirectoryIndex = collections.namedtuple('DirectoryIndex', ['root', 'dirs', 'files'])

for file_name in DirectoryIndex(*next(os.walk('.'))).files:
    file_path = os.path.join('.', file_name)
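One possible shape for that custom constructor (a hedged sketch, not the original author's code): subclass the namedtuple and add a classmethod that wraps next(os.walk(path)).
import collections
import os

class DirectoryIndex(collections.namedtuple('DirectoryIndex', ['root', 'dirs', 'files'])):
    @classmethod
    def from_path(cls, input_path):
        # Keep only the first (root, dirs, files) triple that os.walk yields.
        return cls._make(next(os.walk(input_path)))

# Usage (hypothetical directory):
# for file_name in DirectoryIndex.from_path('.').files:
#     print(os.path.join('.', file_name))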
I really like using the scandir function that is built into the os library. Here is a working example:
import os

i = 0
with os.scandir('/usr/local/bin') as root_dir:
    for path in root_dir:
        if path.is_file():
            i += 1
            print(f"Full path is: {path} and just the name is: {path.name}")
print(f"{i} files scanned successfully.")
Get all the .asm files in a directory by doing this.
import os
path = "path_to_file"
file_type = '.asm'
for filename in os.listdir(path=path):
    if filename.endswith(file_type):
        print(filename)
        print(f"{path}/{filename}")
        # do something below
I don't understand why some answers are complicated. This is how I would do it with Python 2.7. Replace DIRECTORY_TO_LOOP with the directory you want to use.
import os
DIRECTORY_TO_LOOP = '/var/www/files/'
for root, dirs, files in os.walk(DIRECTORY_TO_LOOP, topdown=False):
    for name in files:
        print(os.path.join(root, name))

Find all CSV files in a directory using Python

How can I find all files in a directory with the extension .csv in Python?
import os
import glob
path = 'c:\\'
extension = 'csv'
os.chdir(path)
result = glob.glob('*.{}'.format(extension))
print(result)
from os import listdir
def find_csv_filenames(path_to_dir, suffix=".csv"):
    filenames = listdir(path_to_dir)
    return [filename for filename in filenames if filename.endswith(suffix)]
The function find_csv_filenames() returns a list of filenames as strings that reside in the directory path_to_dir with the given suffix (by default, ".csv").
Addendum
How to print the filenames:
filenames = find_csv_filenames("my/directory")
for name in filenames:
    print(name)
By using the combination of filter and a lambda, you can easily filter out the CSV files in a given folder.
import os
all_files = os.listdir("/path-to-dir")
csv_files = list(filter(lambda f: f.endswith('.csv'), all_files))
# lambda returns True if filename (within `all_files`) ends with .csv or else False
# and filter function uses the returned boolean value to filter .csv files from list files.
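For comparison, the same filtering written as a list comprehension, which many style guides prefer over filter plus a lambda:
import os

all_files = os.listdir("/path-to-dir")
csv_files = [f for f in all_files if f.endswith('.csv')]  # same result as the filter() version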
Use the Python os module to find CSV files in a directory.
A simple example is here:
import os

# This is the path where you want to search
path = r'd:'
# This is the extension you want to detect
extension = '.csv'

for root, dirs_list, files_list in os.walk(path):
    for file_name in files_list:
        if os.path.splitext(file_name)[-1] == extension:
            file_name_path = os.path.join(root, file_name)
            print(file_name)
            print(file_name_path)  # This is the full path of the filtered file
I had to get csv files that were in subdirectories, therefore, using the response from tchlpr I modified it to work best for my use case:
import os
import glob

os.chdir('/path/to/main/dir')
# Note: without recursive=True this matches CSVs one subdirectory down;
# for a fully recursive search use glob.glob('**/*.csv', recursive=True).
result = glob.glob('*/**.csv')
print(result)
import os

path = 'C:/Users/Shashank/Desktop/'
os.chdir(path)

for p, n, f in os.walk(os.getcwd()):
    for a in f:
        a = str(a)
        if a.endswith('.csv'):
            print(a)
            print(p)
This will also help to identify the path of these CSV files.
While the solution given by thclpr works, it scans only the files immediately in the directory and not files in the subdirectories, if any. Although this is not the requirement, just in case someone wishes to scan subdirectories too, below is code that uses os.walk:
import os
from glob import glob

PATH = "/home/someuser/projects/someproject"
EXT = "*.csv"
all_csv_files = [file
                 for path, subdir, files in os.walk(PATH)
                 for file in glob(os.path.join(path, EXT))]
print(all_csv_files)
Copied from this blog.
Use the Python glob module to easily list out the files we need.
import glob
path_csv = glob.glob("../data/subfolder/*.csv")
This solution uses the Python built-in filter, which keeps the elements for which a function returns true (wrapped in list() because filter returns an iterator in Python 3). Here the anonymous function checks for '.csv' in every element of the directory listing obtained with os.listdir().
import os
filepath = 'filepath_to_my_CSVs'  # for example: './my_data/'
list(filter(lambda x: '.csv' in x, os.listdir(filepath)))
Many (linked) answers change working directory with os.chdir(). But you don't have to.
Recursively print all CSV files in /home/project/ directory:
import glob

pathname = "/home/project/**/*.csv"
for file in glob.iglob(pathname, recursive=True):
    print(file)
Requires Python 3.5+. From the docs [1]:
pathname can be either absolute (like /usr/src/Python-1.5/Makefile) or relative (like ../../Tools/*/*.gif)
pathname can contain shell-style wildcards.
Whether or not the results are sorted depends on the file system.
If recursive is true, the pattern ** will match any files and zero or more directories, subdirectories and symbolic links to directories
[1] https://docs.python.org/3/library/glob.html#glob.glob
You could just use glob with recursive = True, the pattern ** will match any files and zero or more directories, subdirectories and symbolic links to directories.
import glob, os

os.chdir("C:\\Users\\username\\Desktop\\MAIN_DIRECTORY")
for file in glob.glob("**/*.csv", recursive=True):
    print(file)
Please use this tested working code. This function will return a list of all the CSV files with absolute CSV file paths in your specified path.
import os
from glob import glob
def get_csv_files(dir_path, ext):
    os.chdir(dir_path)
    return list(map(lambda x: os.path.join(dir_path, x), glob(f'*.{ext}')))

print(get_csv_files("E:\\input\\dir\\path", "csv"))

Directory-tree listing in Python

How do I get a list of all files (and directories) in a given directory in Python?
This is a way to traverse every file and directory in a directory tree:
import os
for dirname, dirnames, filenames in os.walk('.'):
    # print path to all subdirectories first.
    for subdirname in dirnames:
        print(os.path.join(dirname, subdirname))

    # print path to all filenames.
    for filename in filenames:
        print(os.path.join(dirname, filename))

    # Advanced usage:
    # editing the 'dirnames' list will stop os.walk() from recursing into there.
    if '.git' in dirnames:
        # don't go into any .git directories.
        dirnames.remove('.git')
You can use
os.listdir(path)
For reference and more os functions look here:
Python 2 docs: https://docs.python.org/2/library/os.html#os.listdir
Python 3 docs: https://docs.python.org/3/library/os.html#os.listdir
Here's a helper function I use quite often:
import os
def listdir_fullpath(d):
    return [os.path.join(d, f) for f in os.listdir(d)]
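Usage is then just (hypothetical directory):
# print the full paths of everything directly inside /tmp
for full_path in listdir_fullpath('/tmp'):
    print(full_path)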
import os
for filename in os.listdir("C:\\temp"):
    print(filename)
If you need globbing abilities, there's a module for that as well. For example:
import glob
glob.glob('./[0-9].*')
will return something like:
['./1.gif', './2.txt']
See the documentation here.
For files in current working directory without specifying a path
Python 2.7:
import os
os.listdir('.')
Python 3.x:
import os
os.listdir()
Try this:
import os
for top, dirs, files in os.walk('./'):
    for nm in files:
        print(os.path.join(top, nm))
While os.listdir() is fine for generating a list of file and dir names, frequently you want to do more once you have those names - and in Python 3, pathlib makes those other chores simple. Let's take a look and see if you like it as much as I do.
To list dir contents, construct a Path object and grab the iterator:
In [16]: Path('/etc').iterdir()
Out[16]: <generator object Path.iterdir at 0x110853fc0>
If we want just a list of names of things:
In [17]: [x.name for x in Path('/etc').iterdir()]
Out[17]:
['emond.d',
'ntp-restrict.conf',
'periodic',
If you want just the dirs:
In [18]: [x.name for x in Path('/etc').iterdir() if x.is_dir()]
Out[18]:
['emond.d',
'periodic',
'mach_init.d',
If you want the names of all conf files in that tree:
In [20]: [x.name for x in Path('/etc').glob('**/*.conf')]
Out[20]:
['ntp-restrict.conf',
'dnsextd.conf',
'syslog.conf',
If you want a list of conf files in the tree >= 1K:
In [23]: [x.name for x in Path('/etc').glob('**/*.conf') if x.stat().st_size > 1024]
Out[23]:
['dnsextd.conf',
'pf.conf',
'autofs.conf',
Resolving relative paths become easy:
In [32]: Path('../Operational Metrics.md').resolve()
Out[32]: PosixPath('/Users/starver/code/xxxx/Operational Metrics.md')
Navigating with a Path is pretty clear (although unexpected):
In [10]: p = Path('.')
In [11]: core = p / 'web' / 'core'
In [13]: [x for x in core.iterdir() if x.is_file()]
Out[13]:
[PosixPath('web/core/metrics.py'),
PosixPath('web/core/services.py'),
PosixPath('web/core/querysets.py'),
A recursive implementation
import os

def scan_dir(dir):
    for name in os.listdir(dir):
        path = os.path.join(dir, name)

        if os.path.isfile(path):
            print(path)
        else:
            scan_dir(path)
I wrote a long version, with all the options I might need: http://sam.nipl.net/code/python/find.py
I guess it will fit here too:
#!/usr/bin/env python
import os
import sys

def ls(dir, hidden=False, relative=True):
    nodes = []
    for nm in os.listdir(dir):
        if not hidden and nm.startswith('.'):
            continue
        if not relative:
            nm = os.path.join(dir, nm)
        nodes.append(nm)
    nodes.sort()
    return nodes

def find(root, files=True, dirs=False, hidden=False, relative=True, topdown=True):
    root = os.path.join(root, '')  # add slash if not there
    for parent, ldirs, lfiles in os.walk(root, topdown=topdown):
        if relative:
            parent = parent[len(root):]
        if dirs and parent:
            yield os.path.join(parent, '')
        if not hidden:
            lfiles = [nm for nm in lfiles if not nm.startswith('.')]
            ldirs[:] = [nm for nm in ldirs if not nm.startswith('.')]  # in place
        if files:
            lfiles.sort()
            for nm in lfiles:
                nm = os.path.join(parent, nm)
                yield nm

def test(root):
    print("* directory listing, with hidden files:")
    print(ls(root, hidden=True))
    print()
    print("* recursive listing, with dirs, but no hidden files:")
    for f in find(root, dirs=True):
        print(f)
    print()

if __name__ == "__main__":
    test(*sys.argv[1:])
Here is another option.
os.scandir(path='.')
It returns an iterator of os.DirEntry objects corresponding to the entries (along with file attribute information) in the directory given by path.
Example:
with os.scandir(path) as it:
    for entry in it:
        if not entry.name.startswith('.'):
            print(entry.name)
Using scandir() instead of listdir() can significantly increase the performance of code that also needs file type or file attribute information, because os.DirEntry objects expose this information if the operating system provides it when scanning a directory. All os.DirEntry methods may perform a system call, but is_dir() and is_file() usually only require a system call for symbolic links; os.DirEntry.stat() always requires a system call on Unix but only requires one for symbolic links on Windows.
Python Docs
The one that worked for me is a slightly modified version of Saleh's answer elsewhere on this page.
The code is as follows:
import os
dir = 'given_directory_name'
filenames = [os.path.abspath(os.path.join(dir, i)) for i in os.listdir(dir)]
A nice one liner to list only the files recursively. I used this in my setup.py package_data directive:
import os
[os.path.join(x[0],y) for x in os.walk('<some_directory>') for y in x[2]]
I know it's not the answer to the question, but may come in handy
For Python 2
#!/bin/python2
import os

def scan_dir(path):
    print map(os.path.abspath, os.listdir(path))
For Python 3
For filter and map, you need to wrap them with list()
#!/bin/python3
import os

def scan_dir(path):
    print(list(map(os.path.abspath, os.listdir(path))))
The recommendation now is that you replace your usage of map and filter with generator expressions or list comprehensions:
#!/bin/python
import os

def scan_dir(path):
    print([os.path.abspath(f) for f in os.listdir(path)])
# import modules
import os

_CURRENT_DIR = '.'

def rec_tree_traverse(curr_dir, indent):
    "recursive function to traverse the directory"
    # print("[traverse_tree]")
    try:
        dfList = [os.path.join(curr_dir, f_or_d) for f_or_d in os.listdir(curr_dir)]
    except OSError:
        print("wrong path name/directory name")
        return

    for file_or_dir in dfList:
        if os.path.isdir(file_or_dir):
            # print("dir : ", end="")
            print(indent, file_or_dir, "\\")
            rec_tree_traverse(file_or_dir, indent * 2)
        if os.path.isfile(file_or_dir):
            # print("file : ", end="")
            print(indent, file_or_dir)
    # end of for loop
# end of rec_tree_traverse()

def main():
    base_dir = _CURRENT_DIR
    rec_tree_traverse(base_dir, " ")
    input("enter any key to exit....")
# end of main()

if __name__ == '__main__':
    main()
FYI, to add a filter for a particular extension:
import os

path = '.'

for dirname, dirnames, filenames in os.walk(path):
    # print path to all filenames with extension py.
    for filename in filenames:
        fname_path = os.path.join(dirname, filename)
        fext = os.path.splitext(fname_path)[1]
        if fext == '.py':
            print(fname_path)
        else:
            continue
I figured I'd throw this in: a simple and dirty way to do wildcard searches.
import re
import os

[a for a in os.listdir(".") if re.search(r"^.*\.py$", a)]
The code below will list the directories and the files within the given directory:
def print_directory_contents(sPath):
    import os
    for sChild in os.listdir(sPath):
        sChildPath = os.path.join(sPath, sChild)
        if os.path.isdir(sChildPath):
            print_directory_contents(sChildPath)
        else:
            print(sChildPath)
Here is a one line Pythonic version:
import os
dir = 'given_directory_name'
filenames = [os.path.join(os.path.dirname(os.path.abspath(__file__)),dir,i) for i in os.listdir(dir)]
This code lists the full path of all files and directories in the given directory name.
I know this is an old question. This is a neat way I came across if you are on a Linux machine.
import subprocess
print(subprocess.check_output(["ls", "/"]).decode("utf8"))
Easiest way:
list_output_files = [os.getcwd()+"\\"+f for f in os.listdir(os.getcwd())]
