I am trying to pull a list of top level directories under C:/Folder.
I want to then check the modified date for each directory. I can use
next(os.walk(source_dir))[1]
to get a list of directories.
But when I try and use
os.stat(str(dir_list))[stat.ST_MTIME]
to get the modified date for each directory, it seems to be searching for all directory names as one large name.
This gives me a File Not Found error.
How can I get the modified date for each subdirectory without getting a File Not Found error?
import os.path
import time, stat

# Source directory
source_dir = r'C:/Folder'

def check_dir():
    if not os.path.isdir(source_dir):
        # Needed to detect top level files
        print("Nothing in top folder.")
    else:
        dir_list = next(os.walk(source_dir))[1]
        print(dir_list)
        # get modified time for directories
        dtime = os.stat(str(dir_list))[stat.ST_MTIME]
        if time.time() - dtime <= 7776000:
            print("Found modification in last 90 days for folder =>", dir_list, time.ctime(dtime))

if __name__ == "__main__":
    check_dir()
You can't use os.stat on a list; you have to iterate over the list:
def check_dir():
    if not os.path.isdir(source_dir):
        # Needed to detect top level files
        print("Nothing in top folder.")
    else:
        root, dir_list, _ = next(os.walk(source_dir))
        print(dir_list)
        # get modified time for directories
        for dir in dir_list:
            dtime = os.stat(str(root + os.path.sep + dir))[stat.ST_MTIME]
            if time.time() - dtime <= 7776000:
                print("Found modification in last 90 days for folder =>", dir, time.ctime(dtime))

if __name__ == "__main__":
    check_dir()
The problem is that os.walk gives you paths relative to source_dir. For example:
import os
source_dir = '/tmp/'
relative_paths = [relative_path for relative_path in next(os.walk(source_dir))[1]]
print(relative_paths)
>>> ['com.google.Keystone', 'com.apple.launchd.liNmfkel1M', 'powerlog']
In order to get the stats you need to use the full path of each folder, for example:
import os
import stat
source_dir = '/tmp/'
full_paths = [os.path.join(source_dir, relative_path) for relative_path in next(os.walk(source_dir))[1]]
print(full_paths)
>>> ['/tmp/com.google.Keystone', '/tmp/com.apple.launchd.liNmfkel1M', '/tmp/powerlog']
And then you can use them:
import os
import stat
source_dir = '/tmp/'
full_paths = [os.path.join(source_dir, relative_path) for relative_path in next(os.walk(source_dir))[1]]
stats = [os.stat(path)[stat.ST_MTIME] for path in full_paths]
print(stats)
>>> [1581523169, 1581523164, 1581523141]
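Tying this back to the original 90-day check, a minimal sketch (reusing source_dir and the threshold from the question) could be:

import os
import stat
import time

source_dir = r'C:/Folder'
ninety_days = 90 * 86400

for name in next(os.walk(source_dir))[1]:
    # build the full path before calling os.stat
    full_path = os.path.join(source_dir, name)
    mtime = os.stat(full_path)[stat.ST_MTIME]
    if time.time() - mtime <= ninety_days:
        print("Found modification in last 90 days for folder =>", name, time.ctime(mtime))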
Related
I have a folder whose directories look somewhat like this:
C:/Documents/A350/a/1.png
C:/Documents/A350/a/2.png
C:/Documents/A350/b/1.png
C:/Documents/B777/a/1.png
C:/Documents/B747/a/1.png
C:/Documents/B747/a/2.png
C:/Documents/B747/b/1.png
C:/Documents/B747/c/1.png
C:/Documents/B747/d/1.png
C:/Documents/B747/d/2.png
I want to move all the .png files to the main folder, i.e. Documents.
import os
import pathlib

def recur(input_path):
    dir_list = os.listdir(input_path)
    for directory in dir_list:
        path_name = os.path.join(input_path, directory)
        p = pathlib.Path(path_name)
        if p.is_dir():
            input_path = path_name
            return recur(input_path)
    return input_path
I have some code to get the deepest path inside a folder, but I am not so sure how to use the recursive function to achieve what I wanted.
Any help would be really appreciated, thanks!!
The program below gets all files recursively from the parent directory and copies them to the parent directory.
import os
import glob
import shutil

files_abs_paths = []

def get_all_files(parent_dir):
    files_n_folders = glob.glob(f'{parent_dir}/**')
    for fl_or_fldr in files_n_folders:
        if os.path.isdir(fl_or_fldr):
            folder = fl_or_fldr
            get_all_files(folder)
        else:
            file = fl_or_fldr
            files_abs_paths.append(file)

parent_dir = r"C:/Documents"

# get all files recursively in parent dir
get_all_files(parent_dir)

# copies files to parent_dir
for fl in files_abs_paths:
    # gets file_name
    file_name = os.path.basename(fl)
    # create file in parent_dir
    new_file_loc = f'{parent_dir}/{file_name}'
    if os.path.exists(new_file_loc) is False:
        shutil.copyfile(fl, new_file_loc)
You can also get all the files from a folder tree using os.walk:
If you don't mind overwriting files with duplicate names:
from os import walk, rename
from os.path import join

def collect_files(root):
    for src_path, _, files in walk(root):
        if src_path != root:
            for name in files:
                rename(join(src_path, name), join(root, name))
If you want to add a number to the end of files with duplicate names:
from os import walk, rename
from os.path import join, splitext, exists

def collect_files(root):
    for src_path, _, files in walk(root):
        if src_path != root:
            for name in files:
                dst_name = name
                dst_name_parts = splitext(dst_name)
                file_num = 1
                while exists(join(root, dst_name)):
                    dst_name = '{}_{:0>3}{}'.format(dst_name_parts[0], file_num, dst_name_parts[1])
                    file_num += 1
                rename(join(src_path, name), join(root, dst_name))
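Either variant could then be called on the top-level folder from the question (assuming that path exists), for example:

collect_files(r'C:/Documents')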
Is there an inbuilt module to search for a file in the current directory, as well as all the super-directories?
Without the module, I'll have to list all the files in the current directory, search for the file in question, and recursively move up if the file isn't present. Is there an easier way to do this?
Well, this is not so well implemented, but it will work.
Use listdir to get the list of files/folders in the current directory, then search that list for your file.
If it exists the loop breaks, but if it doesn't, the search moves to the parent directory using os.path.dirname and listdir.
Since the parent dir of "/" is returned as "/" itself, the loop breaks when cur_dir == parent_dir (i.e. when the root has been reached).
import os
import os.path

file_name = "test.txt"  # file to be searched
cur_dir = os.getcwd()   # dir from where search starts; can be replaced with any path

while True:
    file_list = os.listdir(cur_dir)
    parent_dir = os.path.dirname(cur_dir)
    if file_name in file_list:
        print("File Exists in:", cur_dir)
        break
    else:
        if cur_dir == parent_dir:  # if dir is root dir
            print("File not found")
            break
        else:
            cur_dir = parent_dir
Here's another one, using pathlib:
from pathlib import Path

def find_upwards(cwd: Path, filename: str) -> Path | None:
    if cwd == Path(cwd.root) or cwd == cwd.parent:
        return None

    fullpath = cwd / filename

    return fullpath if fullpath.exists() else find_upwards(cwd.parent, filename)

# usage example:
find_upwards(Path.cwd(), "helloworld.txt")
(using some Python 3.10 typing syntax here, you can safely skip that if you are using an earlier version)
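If you are on an earlier interpreter, the same function could be annotated with typing.Optional instead (a minimal sketch, otherwise identical to the version above):

from pathlib import Path
from typing import Optional

def find_upwards(cwd: Path, filename: str) -> Optional[Path]:
    # stop at the filesystem root (its parent is itself)
    if cwd == Path(cwd.root) or cwd == cwd.parent:
        return None
    fullpath = cwd / filename
    return fullpath if fullpath.exists() else find_upwards(cwd.parent, filename)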
Another option, using pathlib:
from pathlib import Path

def search_upwards_for_file(filename):
    """Search in the current directory and all directories above it
    for a file of a particular name.

    Arguments:
    ---------
    filename :: string, the filename to look for.

    Returns
    -------
    pathlib.Path, the location of the first file found or
    None, if none was found
    """
    d = Path.cwd()
    root = Path(d.root)

    while d != root:
        attempt = d / filename
        if attempt.exists():
            return attempt
        d = d.parent

    return None
The parent question was to walk parent directories (not descend into children like the find command):
# walk PARENT directories looking for `filename`:
f = 'filename'
d = os.getcwd()
while d != "/" and f not in os.listdir(d):
    d = os.path.abspath(d + "/../")
if os.path.isfile(os.path.join(d, f)):
    do_something(f)
Here's a version that uses shell globbing to match multiple files:
# walk PARENT directories looking for any *.csv files,
# stopping at the first directory that contains any:
f = '*.csv'
d = os.getcwd()
while d != "/" and not glob.glob(os.path.join(d, f)):
    d = os.path.abspath(d + "/../")
files = glob.glob(os.path.join(d, f))
for filename in files:
    do_something(filename)
Here is a function that does an upward search:
import sys, os, os.path

def up_dir(match, start=None):
    """
    Find a parent path producing a match on one of its entries.
    Without match an empty string is returned.

    :param match: a function returning a bool on a directory entry
    :param start: absolute path or None
    :return: directory with a match on one of its entries

    >>> up_dir(lambda x: False)
    ''
    """
    if start is None:
        start = os.getcwd()
    if any(match(x) for x in os.listdir(start)):
        return start
    parent = os.path.dirname(start)
    if start == parent:
        rootres = start.replace('\\', '/').strip('/').replace(':', '')
        if len(rootres) == 1 and sys.platform == 'win32':
            rootres = ''
        return rootres
    return up_dir(match, start=parent)
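For example, to find the nearest ancestor directory that contains a .git entry (assuming one exists somewhere above the current directory), a call could look like:

# the match function receives each directory entry name
project_root = up_dir(lambda entry: entry == '.git')
print(project_root)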
Here is an example that will find all the .csv files in a specified directory "path" and all of its subdirectories and print them:
import os

for root, dirs, files in os.walk(path):
    for file in files:
        if file.endswith(".csv"):
            path_file = os.path.join(root, file)
            print(path_file)
If you want to start at one directory and work your way through the parents then this would work for finding all the .csv files (for example):
import os
import glob

last_dir = ''
dir = r'c:\temp\starting_dir'
os.chdir(dir)
while last_dir != dir:
    dir = os.getcwd()
    print(glob.glob('*.csv'))
    os.chdir('..')
    last_dir = os.getcwd()
I was looking for this too, since os.walk is exactly the opposite of what I wanted. That searches subdirectories. I wanted to search backwards through parent directories until I hit the drive root.
Bumming some inspiration from previous answers, below is what I am using. It doesn't require changing the working directory and it has a place for you to do something when you find a match. And you can change how the match is found. I'm using regex but a basic string compare would work fine too.
# Looking for a file with the string 'lowda' in it (like beltalowda or inyalowda)
import os
import re  # only if you want to use regex

# Setup initial directories
starting_dir = 'C:\\Users\\AvasaralaC\\Documents\\Projects'
last_dir = ''
curr_dir = starting_dir
filename = ''

# Loop through parent directories until you hit the end or find a match
while last_dir != curr_dir:
    for item in os.listdir(curr_dir):
        if re.compile('.*lowda.*').search(item):  # Here you can do your own comparison
            filename = (curr_dir + os.path.sep + item)
            break
    if filename:
        break
    last_dir = curr_dir
    curr_dir = os.path.abspath(curr_dir + os.path.sep + os.pardir)
Other comparisons you could do are item.lower().endswith('.txt') or some other string comparison.
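For instance, to look for the nearest .txt file instead, the inner check could be swapped like this (a sketch that drops the regex and reuses the loop above):

for item in os.listdir(curr_dir):
    if item.lower().endswith('.txt'):  # plain string comparison instead of regex
        filename = curr_dir + os.path.sep + item
        break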
Just wrote this to find the "images" directory, note '/' is Linux style
import os
import glob

dir = os.getcwd()
while dir != '/' and not glob.glob(dir + '/images'):
    dir = os.path.dirname(dir)
I'm trying to organize some data before processing it.
What I have is a folder of raw tiff files (they're raster bands from a drone sensor).
I want to move these files into new, individual folders. e.g., IMG_001_1, IMG_001_2, IMG_001_3, IMG_001_4 and IMG_001_5 are all moved into a new folder titled IMG_001. I am ok with changing the naming structure of the files in order to make the code simpler.
An additional issue is that there are a few images missing from the folder. The current files are IMG0016 - IMG0054 (no IMG0055), IMG0056 - IMG0086 (no IMG0087), and IMG0087 - IMG0161. This is why I think it would be simpler to just rename the new image folders from 1-143.
My main problem is actually moving the files into the new folders - creating the folders is fairly simple.
Played around a little and this script came out, which should do what you want:
import os
import shutil
import re

UNORG = "C:\\Users\\joshuarb\\Desktop\\Unorganized_Images\\"
ORG = "C:\\Users\\joshuarb\\Desktop\\Organized_Images\\"

def main():
    file_names = [os.path.join(UNORG, i) for i in get_files_of(UNORG)]
    for count in range(0, 143):
        # create one target folder per image number, e.g. IMG_0001
        current_dir = "{}IMG_{:04d}".format(ORG, count)
        os.makedirs(current_dir)
        # collect the files belonging to this image number and move them
        move_files = get_files_to_move(file_names, count)
        print(move_files)
        for i in move_files:
            shutil.move(i, os.path.join(current_dir, os.path.basename(i)))

def get_files_to_move(file_names, count):
    return [i for i in file_names if re.match('.*IMG{}_.*'.format(count), i)]

def get_files_of(mypath):
    (dirpath, dirnames, filenames) = next(os.walk(mypath))
    return filenames

if __name__ == '__main__':
    main()
Feel free to ask if something is unclear ;)
Problem solved!
import os
import shutil

srcpath = r"C:\Users\joshuarb\Desktop\Python_Test\UnorganizedImages"
srcfiles = os.listdir(srcpath)

destpath = r"C:\Users\joshuarb\Desktop\Python_Test\OrganizedImages"

# extract the first eight characters from the filenames and filter out duplicates
destdirs = list(set([filename[0:8] for filename in srcfiles]))

def create(dirname, destpath):
    full_path = os.path.join(destpath, dirname)
    os.mkdir(full_path)
    return full_path

def move(filename, dirpath):
    shutil.move(os.path.join(srcpath, filename), dirpath)

# create destination directories and store their names along with full paths
targets = [
    (folder, create(folder, destpath)) for folder in destdirs
]

for dirname, full_path in targets:
    for filename in srcfiles:
        if dirname == filename[0:8]:
            move(filename, full_path)
I'm trying to write a python script to delete all files in a folder older than X days. This is what I have so far:
import os, time, sys

path = r"c:\users\%myusername%\downloads"
now = time.time()

for f in os.listdir(path):
    if os.stat(f).st_mtime < now - 7 * 86400:
        if os.path.isfile(f):
            os.remove(os.path.join(path, f))
When I run the script, I get:
Error2 - system cannot find the file specified,
and it gives the filename. What am I doing wrong?
os.listdir() returns a list of bare filenames. These do not have a full path, so you need to combine it with the path of the containing directory. You are doing this when you go to delete the file, but not when you stat the file (or when you do isfile() either).
Easiest solution is just to do it once at the top of your loop:
f = os.path.join(path, f)
Now f is the full path to the file and you just use f everywhere (change your remove() call to just use f too).
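Putting that together, a corrected version of the loop from the question might look like this (same path and 7-day cutoff as the question):

import os, time

path = r"c:\users\%myusername%\downloads"
now = time.time()

for f in os.listdir(path):
    f = os.path.join(path, f)          # build the full path once
    if os.stat(f).st_mtime < now - 7 * 86400:
        if os.path.isfile(f):
            os.remove(f)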
I think the new pathlib thingy together with the arrow module for dates makes for neater code.
from pathlib import Path
import arrow

filesPath = r"C:\scratch\removeThem"

criticalTime = arrow.now().shift(hours=+5).shift(days=-7)

for item in Path(filesPath).glob('*'):
    if item.is_file():
        print(str(item.absolute()))
        itemTime = arrow.get(item.stat().st_mtime)
        if itemTime < criticalTime:
            # remove it
            pass
pathlib makes it easy to list the directory contents, to access file characteristics such as creation times, and to get full paths.
arrow makes calculations of times easier and neater.
Here's the output showing the full paths offered by pathlib. (No need to join.)
C:\scratch\removeThem\four.txt
C:\scratch\removeThem\one.txt
C:\scratch\removeThem\three.txt
C:\scratch\removeThem\two.txt
You need to give it the path as well, or it will look in the cwd; ironically enough, you did that for os.remove but nowhere else...
for f in os.listdir(path):
    if os.stat(os.path.join(path, f)).st_mtime < now - 7 * 86400:
I did it in a more straightforward way:
import os, time

path = "/home/mansoor/Documents/clients/AirFinder/vendors"
now = time.time()

for filename in os.listdir(path):
    filestamp = os.stat(os.path.join(path, filename)).st_mtime
    filecompare = now - 7 * 86400
    if filestamp < filecompare:
        print(filename)
You need to use if os.stat(os.path.join(path, f)).st_mtime < now - 7 * 86400: instead of if os.stat(f).st_mtime < now - 7 * 86400:
I find using os.path.getmtime more convenient:
import os, time

path = r"c:\users\%myusername%\downloads"
now = time.time()

for filename in os.listdir(path):
    # if os.stat(os.path.join(path, filename)).st_mtime < now - 7 * 86400:
    if os.path.getmtime(os.path.join(path, filename)) < now - 7 * 86400:
        if os.path.isfile(os.path.join(path, filename)):
            print(filename)
            os.remove(os.path.join(path, filename))
A simple python script to remove /logs/ files older than 10 days
#!/usr/bin/python

# run by crontab
# removes any files in /logs/ older than 10 days

import os, sys, time
from subprocess import call

def get_file_directory(file):
    return os.path.dirname(os.path.abspath(file))

now = time.time()
cutoff = now - (10 * 86400)

files = os.listdir(os.path.join(get_file_directory(__file__), "logs"))
file_path = os.path.join(get_file_directory(__file__), "logs/")
for xfile in files:
    if os.path.isfile(str(file_path) + xfile):
        t = os.stat(str(file_path) + xfile)
        c = t.st_ctime
        # delete file if older than 10 days
        if c < cutoff:
            os.remove(str(file_path) + xfile)
You can replace __file__ with your own path.
This deletes files older than 60 days.
import os
directory = '/home/coffee/Documents'
os.system("find " + directory + " -mtime +60 -print")
os.system("find " + directory + " -mtime +60 -delete")
With comprehensions, it can be:
import os
from time import time
p='.'
result=[os.remove(file) for file in (os.path.join(path, file) for path, _, files in os.walk(p) for file in files) if os.stat(file).st_mtime < time() - 7 * 86400]
print(result)
- remove each matching file: os.remove(file)
- loop over all files found under the path: for file in ...
- the generator yields every file below p: (os.path.join(path, file) for path, _, files in os.walk(p) for file in files)
- p is the directory in the filesystem to start from
- the mtime check selects the matches: if os.stat(file).st_mtime < time() - 7 * 86400

Maybe see: https://ideone.com/Bryj1l
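For readability, the same logic can be unrolled into an ordinary loop (an equivalent sketch of the one-liner above):

import os
from time import time

p = '.'
cutoff = time() - 7 * 86400
for path, _, files in os.walk(p):
    for name in files:
        full = os.path.join(path, name)
        # delete the file if its mtime is older than the cutoff
        if os.stat(full).st_mtime < cutoff:
            os.remove(full)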
Here's how I do it on my Windows machines. It uses shutil to also remove subdirectories created in downloads. I also have a similar one to keep the folders cleaned up on the hard drive of my son's computer, as he has special needs and tends to let things get out of control fast.
import os, time, shutil

paths = (("C:" + os.getenv('HOMEPATH') + "\\Downloads"), (os.getenv('TEMP')))
oneday = (time.time()) - 1 * 86400

try:
    for path in paths:
        for filename in os.listdir(path):
            if os.path.getmtime(os.path.join(path, filename)) < oneday:
                if os.path.isfile(os.path.join(path, filename)):
                    print(filename)
                    os.remove(os.path.join(path, filename))
                elif os.path.isdir(os.path.join(path, filename)):
                    print(filename)
                    shutil.rmtree((os.path.join(path, filename)))
                    os.remove(os.path.join(path, filename))
except:
    pass

print("Maintenance Complete!")
Some of the other answers have the same code, but I feel they have overcomplicated a very simple process.
import os
import time

# folder to clear
dir_path = 'path of directory to clean'

# number of days before which the files are to be deleted
limit_days = 10

threshold = time.time() - limit_days * 86400

entries = os.listdir(dir_path)
for dir in entries:
    creation_time = os.stat(os.path.join(dir_path, dir)).st_ctime
    if creation_time < threshold:
        print(f"{dir} was created on {time.ctime(creation_time)} and will be deleted")
I might be a tad late to the party, but here is my approach: use pathlib and a datetime timestamp to turn the cutoff date into a float and compare it to file.stat().st_mtime.
from pathlib import Path
import datetime as dt
from time import ctime

remove_before = dt.datetime.now() - dt.timedelta(days=10)  # files older than 10 days
removeMe = Path.home() / 'downloads'  # points to :\users\%myusername%\

for file in removeMe.iterdir():
    if remove_before.timestamp() > file.stat().st_mtime:
        print(ctime(file.stat().st_mtime))
        file.unlink()  # to delete the file
I would like to add what I came up with to do this task.
The function is called in the login process.
import os, datetime

def remove_files():
    removed = 0
    path = "desired path"
    # Check current working directory.
    dir_to_search = os.getcwd()
    print("Current working directory %s" % dir_to_search)
    # compare current to desired directory
    if dir_to_search != "full desired path":
        # Now change the directory
        os.chdir("desired path")
        # Check current working directory.
        dir_to_search = os.getcwd()
        print("Directory changed successfully %s" % dir_to_search)
    for dirpath, dirnames, filenames in os.walk(dir_to_search):
        for file in filenames:
            curpath = os.path.join(dirpath, file)
            file_modified = datetime.datetime.fromtimestamp(os.path.getmtime(curpath))
            if datetime.datetime.now() - file_modified > datetime.timedelta(hours=1):
                os.remove(curpath)
                removed += 1
    print(removed)
I'm looking for a way to include/exclude files patterns and exclude directories from a os.walk() call.
Here's what I'm doing by now:
import fnmatch
import os

includes = ['*.doc', '*.odt']
excludes = ['/home/paulo-freitas/Documents']

def _filter(paths):
    for path in paths:
        if os.path.isdir(path) and not path in excludes:
            yield path
        for pattern in (includes + excludes):
            if not os.path.isdir(path) and fnmatch.fnmatch(path, pattern):
                yield path

for root, dirs, files in os.walk('/home/paulo-freitas'):
    dirs[:] = _filter(map(lambda d: os.path.join(root, d), dirs))
    files[:] = _filter(map(lambda f: os.path.join(root, f), files))

    for filename in files:
        filename = os.path.join(root, filename)
        print(filename)
Is there a better way to do this? How?
This solution uses fnmatch.translate to convert glob patterns to regular expressions (it assumes includes is used only for files):
import fnmatch
import os
import os.path
import re

includes = ['*.doc', '*.odt']  # for files only
excludes = ['/home/paulo-freitas/Documents']  # for dirs and files

# transform glob patterns to regular expressions
includes = r'|'.join([fnmatch.translate(x) for x in includes])
excludes = r'|'.join([fnmatch.translate(x) for x in excludes]) or r'$.'

for root, dirs, files in os.walk('/home/paulo-freitas'):

    # exclude dirs
    dirs[:] = [os.path.join(root, d) for d in dirs]
    dirs[:] = [d for d in dirs if not re.match(excludes, d)]

    # exclude/include files
    files = [os.path.join(root, f) for f in files]
    files = [f for f in files if not re.match(excludes, f)]
    files = [f for f in files if re.match(includes, f)]

    for fname in files:
        print(fname)
From docs.python.org:
os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
When topdown is True, the caller can modify the dirnames list in-place … this can be used to prune the search …
for root, dirs, files in os.walk('/home/paulo-freitas', topdown=True):
    # excludes can be done with fnmatch.filter and complementary set,
    # but it's more annoying to read.
    dirs[:] = [d for d in dirs if d not in excludes]
    for pat in includes:
        for f in fnmatch.filter(files, pat):
            print(os.path.join(root, f))
I should point out that the above code assumes excludes is a pattern, not a full path. You would need to adjust the list comprehension to filter if os.path.join(root, d) not in excludes to match the OP case.
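With that adjustment, the pruning line could read:

dirs[:] = [d for d in dirs if os.path.join(root, d) not in excludes]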
Why fnmatch?
import os

excludes=....

for ROOT, DIR, FILES in os.walk("/path"):
    for file in FILES:
        if file.endswith(('doc', 'odt')):
            print(file)
    for directory in DIR:
        if not directory in excludes:
            print(directory)
not exhaustively tested
dirtools is perfect for your use-case:
from dirtools import Dir
print(Dir('.', exclude_file='.gitignore').files())
Here is one way to do that
import fnmatch
import os

excludes = ['/home/paulo-freitas/Documents']

matches = []
for path, dirs, files in os.walk(os.getcwd()):
    for eachpath in excludes:
        if eachpath in path:
            continue
        else:
            for result in [os.path.abspath(os.path.join(path, filename)) for
                           filename in files if fnmatch.fnmatch(filename, '*.doc') or fnmatch.fnmatch(filename, '*.odt')]:
                matches.append(result)

print(matches)
import os

includes = ['*.doc', '*.odt']
excludes = ['/home/paulo-freitas/Documents']

def file_search(path, exe):
    for x, y, z in os.walk(path):
        for a in z:
            if a.endswith(exe.lstrip('*')):  # compare against the extension part of the pattern
                print(os.path.join(x, a))

for x in includes:
    file_search(excludes[0], x)
This is an example of excluding directories and files with os.walk():
import os
import shutil

ignoreDirPatterns = [".git"]
ignoreFilePatterns = [".php"]

def copyTree(src, dest, onerror=None):
    src = os.path.abspath(src)
    src_prefix = len(src) + len(os.path.sep)
    for root, dirs, files in os.walk(src, onerror=onerror):
        for pattern in ignoreDirPatterns:
            if pattern in root:
                break
        else:
            # If the above break didn't run, this part will be executed
            for file in files:
                for pattern in ignoreFilePatterns:
                    if pattern in file:
                        break
                else:
                    # If the above break didn't run, this part will be executed
                    dirpath = os.path.join(dest, root[src_prefix:])
                    try:
                        os.makedirs(dirpath, exist_ok=True)
                    except OSError as e:
                        if onerror is not None:
                            onerror(e)
                    filepath = os.path.join(root, file)
                    shutil.copy(filepath, dirpath)
                continue  # if the above else didn't execute, this will be reached
        continue  # if the above else didn't execute, this will be reached
Python >= 3.2 is required because of exist_ok in makedirs.
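A call could then look like this (the source and destination paths are hypothetical):

copyTree('/home/paulo-freitas', '/tmp/backup')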
The above methods did not work for me.
So this is what I came up with, an expansion of my original answer to another question.
What worked for me was:
if (not (str(root) + '/').startswith(tuple(exclude_foldr)))
which builds a path and excludes the tuple of my listed folders.
This gave me the exact result I was looking for.
My goal for this was to keep my Mac organized.
I can search any folder by path, locate & move specific file types, ignore subfolders, and I preemptively prompt the user to ask if they want to move the files.
NOTE: the prompt is shown only once per run and is NOT per file.
By default the prompt defaults to NO when you hit enter instead of [y/N], and will just list the potential files to be moved.
This is only a snippet of my GitHub script; please visit it for the full version.
HINT: Read the script below, as I added info per line about what I did.
#!/usr/bin/env python3
# =============================================================================
# Created On : MAC OSX High Sierra 10.13.6 (17G65)
# Created On : Python 3.7.0
# Created By : Jeromie Kirchoff
# =============================================================================
"""THE MODULE HAS BEEN BUILD FOR KEEPING YOUR FILES ORGANIZED."""
# =============================================================================
from os import walk
from os import path
from shutil import move
import getpass
import click

mac_username = getpass.getuser()
includes_file_extensn = ([".jpg", ".gif", ".png", ".jpeg", ])
search_dir = path.dirname('/Users/' + mac_username + '/Documents/')
target_foldr = path.dirname('/Users/' + mac_username + '/Pictures/Archive/')
exclude_foldr = set([target_foldr,
                     path.dirname('/Users/' + mac_username +
                                  '/Documents/GitHub/'),
                     path.dirname('/Users/' + mac_username +
                                  '/Documents/Random/'),
                     path.dirname('/Users/' + mac_username +
                                  '/Documents/Stupid_Folder/'),
                     ])

if click.confirm("Would you like to move files?",
                 default=False):
    question_moving = True
else:
    question_moving = False


def organize_files():
    """THE MODULE HAS BEEN BUILD FOR KEEPING YOUR FILES ORGANIZED."""
    # topdown=True required for filtering.
    # "Root" had all info i needed to filter folders not dir...
    for root, dir, files in walk(search_dir, topdown=True):
        for file in files:
            # creating a directory to str and excluding folders that start with
            if (not (str(root) + '/').startswith(tuple(exclude_foldr))):
                # showcase only the file types looking for
                if (file.endswith(tuple(includes_file_extensn))):
                    # using path.normpath as i found an issue with double //
                    # in file paths.
                    filetomove = path.normpath(str(root) + '/' +
                                               str(file))
                    # forward slash required for both to split
                    movingfileto = path.normpath(str(target_foldr) + '/' +
                                                 str(file))
                    # Answering "NO" this only prints the files "TO BE Moved"
                    print('Files To Move: ' + str(filetomove))
                    # This is using the prompt you answered at the beginning
                    if question_moving is True:
                        print('Moving File: ' + str(filetomove) +
                              "\n To:" + str(movingfileto))
                        # This is the command that moves the file
                        move(filetomove, movingfileto)
                        pass
                    # The rest is ignoring explicitly and continuing
                    else:
                        pass
                    pass
                else:
                    pass
            else:
                pass


if __name__ == '__main__':
    organize_files()
Example of running my script from terminal:
$ python3 organize_files.py
Exclude list: {'/Users/jkirchoff/Pictures/Archive', '/Users/jkirchoff/Documents/Stupid_Folder', '/Users/jkirchoff/Documents/Random', '/Users/jkirchoff/Documents/GitHub'}
Files found will be moved to this folder:/Users/jkirchoff/Pictures/Archive
Would you like to move files?
No? This will just list the files.
Yes? This will Move your files to the target folder.
[y/N]:
Example of listing files:
Files To Move: /Users/jkirchoff/Documents/Archive/JayWork/1.custom-award-768x512.jpg
Files To Move: /Users/jkirchoff/Documents/Archive/JayWork/10351458_318162838331056_9023492155204267542_n.jpg
...etc
Example of moving files:
Moving File: /Users/jkirchoff/Documents/Archive/JayWork/1.custom-award-768x512.jpg
To: /Users/jkirchoff/Pictures/Archive/1.custom-award-768x512.jpg
Moving File: /Users/jkirchoff/Documents/Archive/JayWork/10351458_318162838331056_9023492155204267542_n.jpg
To: /Users/jkirchoff/Pictures/Archive/10351458_318162838331056_9023492155204267542_n.jpg
...