I set up the Dropbox-Uploader script on the Raspberry Pi and was able to successfully upload a file to Dropbox, but now I need to set up an automatic script that uploads files into a specific destination folder within Dropbox. I recently found a script that would do just that, but my problem with it is that I can't specify a destination folder within Dropbox. Other users replied on the forum post asking for a destination-folder input as well, but the post has been inactive for months.
https://github.com/andreafabrizi/Dropbox-Uploader
https://www.raspberrypi.org/forums/viewtopic.php?t=164166
I researched other Stack Overflow posts related to this problem, but they would not work in my circumstances. Both scripts work great, but I would like to see if it's possible to alter the script to specify a destination folder within Dropbox.
syncdir is the local folder for upload.
I need an input for something like "/dropbox/TeamFolder" instead of having the files upload straight into my Dropbox user directory.
import os
import subprocess
from subprocess import Popen, PIPE

# The directory to sync
syncdir = "/home/pi/Dropbox-Files/"
# Path to the Dropbox-Uploader shell script
uploader = "/home/pi/Dropbox-Uploader/dropbox_uploader.sh"
# If 1 then files will be uploaded. Set to 0 for testing
upload = 1
# If 1 then don't check whether the file already exists, just upload it; if 0, skip files that already exist
overwrite = 0
# If 1 then crawl subdirectories for files to upload
recursive = 1
# Delete the local file on successful upload
deleteLocal = 0

# Prints indented output
def print_output(msg, level):
    print((" " * level * 2) + msg)

# Gets a list of files in a Dropbox directory
def list_files(path):
    p = Popen([uploader, "list", path], stdin=PIPE, stdout=PIPE, stderr=PIPE)
    output = p.communicate()[0].decode("utf-8")
    fileList = list()
    lines = output.splitlines()
    for line in lines:
        if line.startswith(" [F]"):
            line = line[5:]
            line = line[line.index(' ') + 1:]
            fileList.append(line)
    return fileList

# Uploads a single file
def upload_file(localPath, remotePath):
    p = Popen([uploader, "upload", localPath, remotePath], stdin=PIPE, stdout=PIPE, stderr=PIPE)
    output = p.communicate()[0].decode("utf-8").strip()
    if output.startswith("> Uploading") and output.endswith("DONE"):
        return 1
    else:
        return 0

# Uploads the files in a directory
def upload_files(path, level):
    fullpath = os.path.join(syncdir, path)
    print_output("Syncing " + fullpath, level)
    if not os.path.exists(fullpath):
        print_output("Path not found: " + path, level)
    else:
        # Get a list of files/dirs in the path
        filesAndDirs = os.listdir(fullpath)
        # Group files and directories
        files = list()
        dirs = list()
        for file in filesAndDirs:
            filepath = os.path.join(fullpath, file)
            if os.path.isfile(filepath):
                files.append(file)
            if os.path.isdir(filepath):
                dirs.append(file)
        print_output(str(len(files)) + " Files, " + str(len(dirs)) + " Directories", level)
        # If the path contains files and we don't want to overwrite, get a list of the files in Dropbox
        if len(files) > 0 and overwrite == 0:
            dfiles = list_files(path)
        # Loop through the files to check which to upload
        for f in files:
            print_output("Found File: " + f, level)
            if upload == 1 and (overwrite == 1 or f not in dfiles):
                fullFilePath = os.path.join(fullpath, f)
                relativeFilePath = os.path.join(path, f)
                print_output("Uploading File: " + f, level + 1)
                if upload_file(fullFilePath, relativeFilePath) == 1:
                    print_output("Uploaded File: " + f, level + 1)
                    if deleteLocal == 1:
                        print_output("Deleting File: " + f, level + 1)
                        os.remove(fullFilePath)
                else:
                    print_output("Error Uploading File: " + f, level + 1)
        # If recursive, loop through the directories
        if recursive == 1:
            for d in dirs:
                print_output("Found Directory: " + d, level)
                relativePath = os.path.join(path, d)
                upload_files(relativePath, level + 1)

# Start
upload_files("", 1)
When you use the dropbox_uploader.sh script, you specify the folder to save the file to on the Dropbox account. However, that is limited to whatever settings you gave the "app" in the Dropbox settings when you got your access token: you can set it to allow reading/writing anywhere in your Dropbox account, or only in a specific folder.
Look for "Permission Type" and "App Folder Name" on the Dropbox apps setup page: https://www.dropbox.com/developers/apps
Related
I'm working with a script that downloads files from cloud firebase storage, stores them locally in a folder "assets", and renames the references to those files internally on each target .md note file.
The issue is just with the last step, where the temp file with the new reference needs to replace the old file. I receive the error "PermissionError: [WinError 32] The process cannot access the file because it is being used by another process".
I tried a few things, but eventually found this source: https://bugs.python.org/issue46003
It seems there is a persistent bug with the os.replace() function on Windows, and I don't know how to implement a workaround in my context. I considered adding a delay as suggested in the thread, but that doesn't seem like the best option since there are hundreds of references I need to modify.
Here is the code:
(The area in question is at the end: "if os.path.exists(fullTempFilePath)...".)
import re
import glob
import os
import requests
import calendar
import time

vaultDir = '/Users/kacar/Firebase Temp/Firebase-Test'
firebaseShort = 'none'
fullRead = 'none'
fileFullPath = ''
fullTempFilePath = ''
i = 0
ext = ''

# Walk through all files in all directories within the specified vault directory
for subdir, dirs, files in os.walk(vaultDir):
    for file in files:
        # Open file in directory
        fileFullPath = os.path.join(subdir, file)
        fhand = open(fileFullPath, errors='ignore')
        for line in fhand:
            # Download the Firebase file and save it in the assets folder
            if 'firebasestorage' in line:
                try:
                    # If it's a PDF, it will be in the format {{pdf: link}}
                    if '{{pdf:' in line:
                        link = re.search(r'https://firebasestorage(.*)\?alt(.*)\}', line)
                    else:
                        link = re.search(r'https://firebasestorage(.*)\?alt(.*)\)', line)
                    firebaseShort = 'https://firebasestorage' + link.group(1)  # https://firebasestorage.googleapis.com/v0/b/firescript-577a2.appspot.com/o/imgs%2Fapp%2FDownloadMyBrain%2FLy4Wel-rjk.png
                    firebaseUrl = link.group(0)[:-1]  # https://firebasestorage.googleapis.com/v0/b/firescript-577a2.appspot.com/o/imgs%2Fapp%2FDownloadMyBrain%2FLy4Wel-rjk.png?alt=media&token=0fbafc8f-0a47-4720-9e68-88f70803ced6
                    # Download the file locally
                    r = requests.get(firebaseUrl)
                    timestamp = calendar.timegm(time.gmtime())
                    # Get the file extension. Ex: .png; .jpeg
                    reg = re.search(r'(.*)\.(.+)', firebaseShort[-5:])  # a.png / .jpeg
                    ext = '.' + reg.group(2)  # .jpeg
                    # Create the assets folder if it doesn't exist
                    if not os.path.exists(vaultDir + '/assets'):
                        os.makedirs(vaultDir + '/assets')
                    # Create a new local file out of the downloaded Firebase file
                    newFilePath = 'assets/' + str(timestamp) + '_' + str(i) + ext
                    # print(firebaseUrl + '>>>' + newFilePath)
                    with open(vaultDir + '/' + newFilePath, 'wb') as output_file:
                        output_file.write(r.content)
                except AttributeError:  # This is to prevent the AttributeError exception when no matches are returned
                    continue
                # Save the Markdown file with the new local file link as a temp file.
                # If there is already a temp version of the file, open that.
                fullTempFilePath = vaultDir + '/temp_' + file
                if os.path.exists(fullTempFilePath):
                    fullRead = open(fullTempFilePath, errors='ignore')
                else:
                    fullRead = open(fileFullPath, errors='ignore')
                data = fullRead.read()
                data = data.replace(firebaseUrl, newFilePath)
                fullRead.close()
                with open(fullTempFilePath, 'wt') as temp_file:
                    temp_file.write(data)
                i = i + 1
        if os.path.exists(fullTempFilePath):
            path = os.replace(fullTempFilePath, fileFullPath)
        # Close file
        fhand.close()
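The likely culprit here: fhand still has fileFullPath open when os.replace() runs, since the replace happens before fhand.close(). A minimal sketch of a workaround under that assumption: close every handle before replacing, and retry briefly in case another process (antivirus, an indexer) is still holding the file (replace_with_retry is a hypothetical helper, not part of the original script):

import os
import time

def replace_with_retry(src, dst, attempts=5, delay=0.2):
    # On Windows, os.replace raises PermissionError while another
    # process still has the destination file open; retry a few times
    for attempt in range(attempts):
        try:
            os.replace(src, dst)
            return True
        except PermissionError:
            time.sleep(delay)
    return False

In the script above, that would mean moving the os.replace() call to after fhand.close() (or reading the file with a with block so it closes itself) and then calling replace_with_retry(fullTempFilePath, fileFullPath).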
I'm trying to delete a file in a certain folder. I tried to use this command:
file_exists = os.path.exists(line1[0] + '.xlsx')
if file_exists:
    find_file = m.find(line1[0] + ".xlsx")
    if find_file:
        delete_file = m.delete(find_file[0])
The problem is that there are multiple files with the same name in different folders. Each folder is named with the date it was created. The file name is a course code, e.g. BH2952. When I use the above command, all the files named BH2952 are deleted, but I only want to delete the file in today's date folder. Does anyone know how to do this?
And here is the full code that I've done:
import os
import os.path
import sys
from pathlib import Path
from os.path import exists
from datetime import datetime
from mega import Mega

today = datetime.now()
mega = Mega()

# Login to MEGA
m = mega.login('nurul.syamsina1202@gmail.com', 'Syamsina990212')
# Get user details
details = m.get_user()
# Get account disk quota
quota = m.get_quota()
# Get account storage space
''' specify unit output kilo, mega, gig, else bytes will output '''
space = m.get_storage_space(kilo=True)
# Get the account's files
files = m.get_files()

# Create a folder on Mega if it hasn't been created yet
''' Excludes results which are in the Trash folder (i.e. deleted) '''
folder = m.find("Fourth_Year_Students", exclude_deleted=True)
if not folder:
    m.create_folder("Fourth_Year_Students")
subfolder = m.find("Fourth_Year_Students/" + today.strftime('%d%m%Y'), exclude_deleted=True)
if not subfolder:
    m.create_folder("Fourth_Year_Students/" + today.strftime('%d%m%Y'))

# Change directory to today's date folder
os.chdir(r"C:/OpenVino/excel_report/Fourth_Year_Students/" + today.strftime('%d%m%Y'))
os.getcwd()

# Read the class schedule file
file1 = open(r"C:/OpenVino/excel_report/class_codes_and_names.txt", "r")
lines_1 = file1.readlines()
for line1 in lines_1:
    line1 = line1.strip('\n')
    line1 = line1.split(",")
    #os.chdir(directory_name)
    file_exists = os.path.exists(line1[0] + '.xlsx')
    if file_exists:
        find_file = m.find(line1[0] + ".xlsx")
        if find_file:
            delete_file = m.delete(find_file[0])
        # Upload a file and get its public link
        folder = m.find("Fourth_Year_Students/" + today.strftime('%d%m%Y'))
        file = m.upload(line1[0] + '.xlsx', folder[0])
        link = m.get_upload_link(file)
        print('\nFile', line1[0], 'is ready. To view the file, please click on the link below:\n', link)
        # see mega.py for destination and filename options
    else:
        continue
m.empty_trash()
print("All files have been successfully uploaded to the cloud.")
I'm trying to write an automatic script for uploading to Google Drive with rclone.
I won't go through all the code, only this check statement. The rclone command checks the files in the local folder against the mounted folder, something like this:
rclone check "local folder" "mounted folder" --ignore-existing --one-way
It prints some data to the terminal that I can't store in a text file, or at least I don't know how.
def upload_check():
    print(" check if all files are uploaded ")
    global Error_upload
    if :  # I'm stuck here: rclone check should return true or false if all files are uploaded by name and size
        Error_upload = True
        return Error_upload
        print("Not uploaded ")  # ---------------------------
    else:  # all good
        Error_upload = False
        return Error_upload
        print("all files are online")  # ---------------------------
My question is: how do I properly check whether two directories are identical by all the files inside and their sizes, returning a Boolean True or False?
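For what it's worth, rclone check reports its result through the exit status, so the comparison can be done without parsing terminal output at all. A minimal sketch under that assumption (paths mirror the command above; rclone check exits with 0 only when the two sides match by name and size, plus hash where available):

import subprocess

def upload_check():
    # rclone check compares the two trees and exits non-zero when they differ
    result = subprocess.run(
        ["rclone", "check", "Local", "gdrive", "--one-way"],
        capture_output=True, text=True)
    return result.returncode == 0

As a bonus, subprocess.run with capture_output=True keeps the terminal output in result.stdout and result.stderr, which can then be written to a text file.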
After a few days I came up with this (admittedly complicated) solution:
import shutil
import os

local = "Local/"
destination = "uploaded/"
checkfile = "logfile.txt"

def upload_check():
    print(" check if all files are uploaded ")
    global Error_upload
    os.system("rclone check 'Local' 'gdrive' --one-way -vv -P --combined logfile.txt")
    destination = "uploaded/"
    checkfile = "logfile.txt"
    search = "="  # marks successfully uploaded files to move out of the source folder
    list_of_files = []
    lines = []
    folders = []
    uniq_folder_list = []
    shutil_l = []
    shutil_f = []
    for line in open(checkfile, "r"):
        if search in line:
            list_of_files = line.split("/")[1]
            lines.append(list_of_files.rstrip())
            list_of_folders = line.split(" ")[1].split("/")[0]
            folders.append(list_of_folders.rstrip())
    [uniq_folder_list.append(n) for n in folders if n not in uniq_folder_list]
    for new_folder in uniq_folder_list:
        if not os.path.exists(destination + new_folder):
            os.makedirs(destination + new_folder)
    for l, f in zip(lines, folders):
        l1 = (local + f + "/" + l)
        f1 = (destination + f)
        shutil_l.append(l1.rstrip())
        shutil_f.append(f1.rstrip())
    for src, dest in zip(shutil_l, shutil_f):
        shutil.move(src, dest)
    os.system("rclone check 'Local' 'gdrive' --one-way -vv -P --combined logfile.txt")
    with open(checkfile, 'r') as read_obj:
        one_char = read_obj.read(1)
    if not one_char:
        Error_upload = False
        print("all files are online")
        return Error_upload
    else:
        Error_upload = True
        print("Not uploaded ")
        return Error_upload
First I created some files and uploaded a couple of them to the drive, along with one corrupted file. Then this script does the job.
The file logfile.txt contains a list generated with rclone:
rclone check 'Local' 'gdrive' --one-way -vv -P --combined logfile.txt
This bash command will generate a logfile like this:
+ 20_10_10/IMG_1301-00006.jpg
+ 20_10_10/IMG_1640-00007.jpg
+ 20_10_10/IMG_1640-00008.jpg
+ 20_10_10/IMG_1640-00009.jpg
+ 20_10_10/IMG_1640-00010.jpg
+ 20_10_10/IMG_1640-00011.jpg #missing on remote
* 20_10_10/IMG_1301-00004.jpg #corrupted file
= 20_10_10/IMG_1301-00005.jpg
= 20_10_10/IMG_1301-00003.jpg
= 20_10_10/IMG_1301-00001.jpg
= 20_10_09/IMG_2145-00028.jpg
= 20_10_10/IMG_1301-00002.jpg
More info under rclone check help on rclone. The files marked with "=" are identical on the local and remote destinations, so we want to move them from the source folder to an "uploaded" folder.
The script then runs again: if the read function can't read anything, the logfile is empty, all files are online, and the upload function doesn't need to run again. But since there are un-uploaded files and a corrupted file (this can happen if the connection is lost while uploading), the script will run the upload function, or whatever other function is triggered by the if statement on the variable "Error_upload".
just for reference:
if Error_upload:  # True means something was not uploaded or is corrupted
    upload()  # your upload function
    upload_check()
else:
    print("All files are on the cloud")
I certainly know that this code could be simpler and improved.
I'm trying to create a program that deletes files after X days. I got that part working, but now I need code that deletes the folder the files were located in, but ONLY if it's empty.
This is for a server that collects log files from builds, which get placed in thousands of folders. I need to delete those upper-level folders if they are empty.
import os
import shutil
import sys
import time
from subprocess import call

# For path you need to use the location of the log files
path = "C:/GroupData"
# Check the current working directory.
retval = os.getcwd()
# This will list the name of the current directory
print("Current working directory %s" % retval)
# Now change the directory
# You will put the same location as you did for path
os.chdir('C:/GroupData/Temp')
workdir = os.getcwd()
# Check the current working directory.
retval = os.getcwd()
# This will list the new name of the working directory
print("Directory changed successfully %s" % retval)

def get_file_directory(file):
    return os.path.dirname(os.path.abspath(file))

# This gets today's date
now = time.time()
# Files that are older than today's date by 10 days will be deleted
cutoff = now - (10 * 86400)

files = os.listdir(os.path.join(get_file_directory('C://GroupData//Temp'), "temp"))
file_path = os.path.join(get_file_directory('C://GroupData//Temp'), "temp/")
# This locates the file name and checks how long ago it was last modified
for xfile in files:
    files1 = os.listdir(os.path.join(get_file_directory('C://GroupData//Temp//' + xfile), xfile))
    file_path1 = os.path.join(get_file_directory('C://GroupData//Temp//' + xfile), xfile)
    for xfile1 in files1:
        if os.path.isfile(str(file_path1) + "\\" + xfile1):
            t = os.stat(str(file_path1) + "\\" + xfile1)
            # m is how long ago it was last modified
            m = t.st_mtime
            # If the modified date is older than the cutoff the file will be deleted
            if m < cutoff:
                # If the file IS older than the cutoff, os.remove deletes the file from the path
                os.remove(str(file_path1) + "\\" + xfile1)
    files = os.listdir(os.path.join(get_file_directory('C://GroupData//Temp'), "temp"))
    file_path = os.path.join(get_file_directory('C://GroupData//Temp'), "temp/")
    os.rmdir(str(file_path1) + "\\")
The code at the bottom works, but only for one folder at a time, and I need it to repeat as many times as necessary so it can delete all the empty folders at once, since this will run automatically.
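One pattern that handles this in a single pass is to walk the tree bottom-up and remove every directory that turns out to be empty. A minimal sketch, assuming the same C:/GroupData/Temp root as above:

import os

root = "C:/GroupData/Temp"
# Walk bottom-up so child folders are removed before their parents are checked
for dirpath, dirnames, filenames in os.walk(root, topdown=False):
    if dirpath != root and not os.listdir(dirpath):
        os.rmdir(dirpath)  # os.rmdir only succeeds on empty folders

Because the walk is bottom-up, a parent that only contained empty subfolders becomes empty itself by the time it is checked, so whole empty branches disappear in one run.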
I'm trying to download several folders from an FTP server with Python 3 using ftplib.
I have a list of the folder names. They are all located in a folder 'root'. The problem is that I don't know how to navigate through them. When I use cwd I can go into a deeper directory, but how do I get back up again?
I'm trying to get something like
list = ["folder1", "folder2", "folder3"]
for folder in list:
##navigate to folder
##do something
You can retrieve the current directory using the FTP.pwd method. Remember that directory before changing directories.
parent_dir = ftp_object.pwd()
list = ["folder1", "folder2", "folder3"]
for folder in list:
    ftp_object.cwd('{}/{}'.format(parent_dir, folder))
    # ... do something ...
    ftp_object.cwd(parent_dir)  # go back to the parent directory
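For context, a self-contained sketch of that pattern (the host name and folder list are placeholders, not taken from the question):

from ftplib import FTP

ftp = FTP("ftp.example.com")  # placeholder host
ftp.login()                   # anonymous login; pass user/password if needed
ftp.cwd("root")               # the folder that contains the subfolders

parent_dir = ftp.pwd()        # remember where we are
for folder in ["folder1", "folder2", "folder3"]:
    ftp.cwd('{}/{}'.format(parent_dir, folder))
    print(folder, ftp.nlst())  # e.g. list the files in this folder
    ftp.cwd(parent_dir)        # back up to the parent
ftp.quit()

An alternative to remembering pwd() is ftp.cwd('..'), which also moves up one level on most servers.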
I made some changes to code I found here
You have to make the destination folder before running the code.
Also, the site I used did not require username or pass.
Please let me know if this works. I am wondering if I should "put this in my back pocket" and save it to my external hard drive.
#!/usr/bin/python
import sys
import ftplib
import urllib.request
import os
import time
import errno

server = "ftp.analog.com"
#user = ""
#password = ""
source = "/pub/MicroConverter/ADuCM36x/"
destination0 = "C:/NewFolder/"  # YOU HAVE TO PUT THIS NEW FOLDER IN C: BEFORE RUNNING
interval = 0.05

ftp = ftplib.FTP(server)
ftp.login()  # (user, password)

count = 0  # We need this variable to make the first folder correctly

def downloadFiles(path, destination):
    try:
        ftp.cwd(path)
        os.chdir(destination)
        mkdir_p(destination[0:len(destination)-1] + path)
        print("Created: " + destination[0:len(destination)-1] + path)
    except OSError:
        pass
    except ftplib.error_perm:
        print("Error: could not change to " + path)
        sys.exit("Ending Application")
    filelist = ftp.nlst()
    print(filelist)
    for file in filelist:
        time.sleep(interval)
        if "." in file:
            url = ("ftp://" + server + path + file)
            urllib.request.urlretrieve(url, destination + path + file)
        else:
            try:
                ftp.cwd(path + file + "/")
                downloadFiles(path + file + "/", destination)
            except ftplib.error_perm:
                os.chdir(destination[0:len(destination)-1] + path)
                try:
                    ftp.retrbinary("RETR " + file, open(os.path.join(destination + path, file), "wb").write)
                    print("Downloaded: " + file)
                except:
                    print("Error: File could not be downloaded " + file)
    return

def mkdir_p(path):
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise

downloadFiles(source, destination0)
#ftp.quit()