Python FTP server: download latest file with specific keywords in filename

I want to download the most recent file from an FTP server with Python. I am able to connect to the server and download all the files in a particular directory, but I do not know how to find the most recent file whose name contains a specific keyword.
The following is the code I am using, but it returns all the files matching the *.png pattern. I do not know how to apply os.path.getctime here to get the latest file. That's all the help I need.
import ftplib
import os

ftp = ftplib.FTP('test.rebex.net', 'demo', 'password')
ftp.retrlines('LIST')
ftp.cwd("/pub")
ftp.retrlines('LIST')
ftp.cwd("example")
ftp.retrlines('LIST')

filematch = '*.png'
target_dir = 'C:/Users/muzamal.pervez/Desktop/OPD Claims'

for filename in ftp.nlst(filematch):
    target_file_name = os.path.join(target_dir, os.path.basename(filename))
    with open(target_file_name, 'wb') as fhandle:
        ftp.retrbinary('RETR %s' % filename, fhandle.write)

Resolved. Here is the working code:
import ftplib

ftp = ftplib.FTP('test.rebex.net', 'demo', 'password')
ftp.retrlines('LIST')
ftp.cwd("pub")
ftp.cwd("example")
ftp.retrlines('LIST')

names = ftp.nlst()
final_names = [name for name in names if 'client' in name]

latest_time = None
latest_name = None
for name in final_names:
    # MDTM returns "213 YYYYMMDDHHMMSS", so the responses compare
    # correctly as plain strings.
    mod_time = ftp.sendcmd("MDTM " + name)
    if (latest_time is None) or (mod_time > latest_time):
        latest_name = name
        latest_time = mod_time

print(latest_name)
with open(latest_name, 'wb') as file:
    ftp.retrbinary('RETR ' + latest_name, file.write)
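As an aside, on servers that support the MLSD command (RFC 3659) you can fetch the modification time of every entry in a single round trip instead of issuing one MDTM per file. A minimal sketch of that variant, assuming the server advertises MLSD and reports the modify fact:
import ftplib

ftp = ftplib.FTP('test.rebex.net', 'demo', 'password')
ftp.cwd("pub")
ftp.cwd("example")

# mlsd() yields (name, facts) pairs; 'modify' is a YYYYMMDDHHMMSS timestamp,
# so max() over that string picks the newest matching file.
candidates = [(name, facts) for name, facts in ftp.mlsd()
              if facts.get('type') == 'file' and 'client' in name]
latest_name, _ = max(candidates, key=lambda entry: entry[1].get('modify', ''))

with open(latest_name, 'wb') as fhandle:
    ftp.retrbinary('RETR ' + latest_name, fhandle.write)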

Related

Ftplib upload failed on second upload, directory not empty

I'm trying to upload all my files to a time-named directory (such as "170905_161330") on my FTP server.
# -*- coding:utf-8 -*-
import ftplib
import os
import datetime

CWD = os.getcwd()
NOW_STR = str(datetime.datetime.now())
LOCAL_STR = "HDD1/AutoBackUp/" + NOW_STR[2:4] + NOW_STR[5:7] + NOW_STR[8:10] + "_" + NOW_STR[11:13] + NOW_STR[14:16] + NOW_STR[17:19] + "/"
FTP_ADDRESS = 'FTP_ADDRESS'
FTP_ID = '1'
FTP_PW = '1'

ftp = ftplib.FTP(FTP_ADDRESS, FTP_ID, FTP_PW)

for i in os.listdir(CWD):
    FILENAME = str(i)
    print(FILENAME)
    ftp.mkd(LOCAL_STR)
    LOCAL_NAME = LOCAL_STR + FILENAME
    print(str(LOCAL_NAME))
    with open(FILENAME, 'rb') as fileOBJ:
        ftp.storlines('STOR ' + str(LOCAL_NAME), fileOBJ)

ftp.quit()
But the error
ftplib.error_perm: 550 HDD1/AutoBackUp/170905_160635/: Directory not empty
keeps appearing. The first file is uploaded correctly, but after that it stops working: I can see the first file on the FTP server, but the second file doesn't exist.
I guess the storlines function only works when the upload folder is empty. How can I solve this problem?
From a quick read of your code, I suspect the problem is in ftp.mkd: you already created the directory on the first iteration of the for loop.
To reproduce this error on your local system, open a terminal:
run mkdir test
run mkdir test again
You'll see an error: File exists. I think the "directory not empty" message is generated from this same error on the server.
Modify your code to put ftp.mkd before the for loop:
ftp.mkd(LOCAL_STR)
for i in os.listdir(CWD):
    FILENAME = str(i)
    print(FILENAME)
    LOCAL_NAME = LOCAL_STR + FILENAME
    print(str(LOCAL_NAME))
    with open(FILENAME, 'rb') as fileOBJ:
        ftp.storlines('STOR ' + str(LOCAL_NAME), fileOBJ)
ftp.quit()
and test it again. Please remember to remove the directory from the server before testing it.
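If you would rather keep the mkd call tolerant of a directory that already exists (for example when the script is re-run within the same second), here is a small sketch of one way to do it; it reuses ftp and LOCAL_STR from the question and assumes the server reports the "already exists" case as a 550 permanent error:
import ftplib

def ensure_remote_dir(ftp, path):
    # Create path on the server, ignoring the error if it already exists.
    try:
        ftp.mkd(path)
    except ftplib.error_perm as exc:
        # Most servers answer 550 when the directory is already there;
        # anything else is re-raised.
        if not str(exc).startswith('550'):
            raise

ensure_remote_dir(ftp, LOCAL_STR)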

Script reading files in ftp directory but not downloading them

The script below is able to read the files in the FTP directory; however, it does not download them. I know it reads them because the list printed in the command window shows them.
from ftplib import FTP
import os, sys, os.path

def handleDownload(block):
    file.write(block)

ddir = 'U:/Test Folder'
os.chdir(ddir)

ftp = FTP('sidads.colorado.edu')
ftp.login()
print ('Logging in.')

directory = '/pub/DATASETS/NOAA/G02158/unmasked/2012/04_Apr/'
print ('Changing to ' + directory)
ftp.cwd(directory)
ftp.retrlines('LIST')

print ('Accessing files')
for subdir, dirs, files in os.walk(directory):
    for file in files:
        full_fname = os.path.join(root, fname);
        print ('Opening local file ')
        ftp.retrbinary('RETR U:/Test Folder' + fname,
                       handleDownload,
                       open(full_fname, 'wb'));
        print ('Closing file ' + filename)
        file.close();

ftp.close()
Here is one way you can do this using the pysftp library:
import pysftp
import os

with pysftp.Connection('hostname', username='username', password='password') as sftp:
    ftp_files = sftp.listdir('/ftp/dir/')
    for file in ftp_files:
        sftp.get(os.path.join('/ftp/dir/', file),
                 localpath=os.path.join('/path/to/save/file/locally/', file))
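If the server really is plain FTP (note that pysftp speaks SFTP, which is a different protocol), a minimal ftplib sketch of the same loop, reusing the host, remote directory and local folder from the question:
from ftplib import FTP
import os

local_dir = 'U:/Test Folder'
remote_dir = '/pub/DATASETS/NOAA/G02158/unmasked/2012/04_Apr/'

ftp = FTP('sidads.colorado.edu')
ftp.login()
ftp.cwd(remote_dir)

for name in ftp.nlst():
    # Write each remote entry into the local folder; any directories in the
    # listing would still need to be skipped or handled separately.
    local_path = os.path.join(local_dir, name)
    with open(local_path, 'wb') as fhandle:
        ftp.retrbinary('RETR ' + name, fhandle.write)

ftp.quit()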

"Permission denied" error from downloading all files from FTP folder

So far I have gotten the names of the files I need from the FTP site. See the code below.
from ftplib import FTP
import os, sys, os.path

def handleDownload(block):
    file.write(block)

ddir = 'U:/Test Folder'
os.chdir(ddir)

ftp = FTP('sidads.colorado.edu')
ftp.login()
print ('Logging in.')

directory = '/pub/DATASETS/NOAA/G02158/unmasked/2012/04_Apr/'
print ('Changing to ' + directory)
ftp.cwd(directory)
ftp.retrlines('LIST')

print ('Accessing files')
filenames = ftp.nlst()  # get filenames within the directory
print (filenames)
Where I am running into trouble is downloading the files into a folder. The code below is what I have tried; however, I get the permission error because the file cannot be created before I write to it.
for filename in filenames:
    local_filename = os.path.join('C:/ArcGis/New folder', filename)
    file = open(local_filename, 'wb')
    ftp.retrbinary('RETR ' + filename, file.write)
    file.close()

ftp.quit()
Here is the error and traceback.
The directory listing includes the . reference to the current folder (and probably also a .. reference to the parent folder).
You have to skip these entries; you cannot download them.
for filename in filenames:
    if (filename != '.') and (filename != '..'):
        local_filename = os.path.join('C:/ArcGis/New folder', filename)
        file = open(local_filename, 'wb')
        ftp.retrbinary('RETR ' + filename, file.write)
        file.close()
Actually you have to skip all folders in the listing.
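One general way to skip every directory, not just . and .., is to ask the server for the type of each entry with MLSD; a short sketch, assuming the server supports that command (RFC 3659) and reusing the ftp connection and target folder from above:
for name, facts in ftp.mlsd():
    # Only download regular files; 'dir', 'cdir' (.) and 'pdir' (..) are skipped.
    if facts.get('type') != 'file':
        continue
    local_filename = os.path.join('C:/ArcGis/New folder', name)
    with open(local_filename, 'wb') as fhandle:
        ftp.retrbinary('RETR ' + name, fhandle.write)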

Downloading from an FTP server using Python

I have a piece of code in Python to download files from an FTP server. The code downloads the very first file in the list of available days but fails to download the second. What could be the problem?
import os, ftplib

destdir = 'D:\precipitation\dl'
ftp = ftplib.FTP('ftp.itc.nl')
ftp.login('anonymous', '')
ftp.cwd('pub/mpe/msg')

available_days = ['summsgmpe_20100101.zip', 'summsgmpe_20100102.zip', 'summsgmpe_20100103.zip', 'summsgmpe_20100104.zip', 'summsgmpe_20100105.zip', 'summsgmpe_20100106.zip', 'summsgmpe_20100107.zip', 'summsgmpe_20100108.zip']
hdfs = list()

for day in available_days:
    file = available_days[available_days.index(day)]
    print 'file=', file
    local_file = os.path.join(destdir, file)
    ftp.retrbinary('RETR %s' % file, open(local_file, 'wb').write)
    hdfs.append(os.path.abspath(local_file))
    ftp.cwd('..')

ftp.quit()
Remove your call to ftp.cwd('..') inside the loop.
That call moves up a directory on every iteration of the list, instead of staying in the folder where the files are.
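For completeness, a sketch of the corrected loop with the cwd('..') call removed and the redundant index lookup dropped; it reuses destdir, available_days, hdfs and the ftp connection from the question:
for day in available_days:
    local_file = os.path.join(destdir, day)
    with open(local_file, 'wb') as fhandle:
        ftp.retrbinary('RETR %s' % day, fhandle.write)
    hdfs.append(os.path.abspath(local_file))

ftp.quit()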

Python rename ftp upload files delete

I have a script that renames files before uploading them to an FTP server. First it searches for the pattern "_768x432_1700_m30_" and, if it finds it, replaces the pattern with "_new_"; then it uploads all ".mp4" files in the directory to an FTP server. But for some reason I can't seem to delete the files after they have been uploaded. Also, is there a better way of writing this script? (I am fairly new to Python.)
#!/usr/bin/python
import os
import glob
import fnmatch
import sys
import ftplib
import shutil
import re
from ftplib import FTP

Host = 'xxxxxx.xxxxx.xxxx.com'
User = 'xxxxxxx'
Passwd = 'xxxxxxx'

ftp = ftplib.FTP(Host, User, Passwd)  # Connect

dest_dir = '/8619/_!/xxxx/xx/xxxxx/xxxxxx/xxxx/'
Origin_dir = '/8619/_!/xxxx/xx/xxxxx/xxxxxx/xxxx/'
pattern = '*.mp4'
file_list = os.listdir(Origin_dir)

for filename in glob.glob(os.path.join(Origin_dir, "*_768x432_1700_m30_*")):
    os.rename(filename, filename.replace('_768x432_1700_m30_', '_new_'))
    video_list = fnmatch.filter(filename, pattern)
    print(video_list)

print "Checking %s for files" % Origin_dir
for files in file_list:
    if fnmatch.fnmatch(files, pattern):
        print(files)
        print "logging into %s FTP" % Host
        ftp = FTP(Host)
        ftp.login(User, Passwd)
        ftp.cwd(dest_dir)
        print "uploading files to %s" % Host
        ftp.storbinary('STOR ' + dest_dir + files, open(Origin_dir + files, "rb"), 1024)
        ftp.close
print 'FTP connection has been closed'
On the following line
ftp.storbinary('STOR ' + dest_dir + files, open(Origin_dir + files, "rb"), 1024)
you open a file, but you neither keep a reference to it nor close it. On Windows (I assume you are running this on Windows), a file cannot be deleted while a process still has it open.
Try the following instead:
print "uploading files to %s" % Host
with open(Origin_dir+files, "rb") as f:
ftp.storbinary('STOR ' + dest_dir+files, f, 1024)
ftp.close()
print 'FTP connection has been closed'
The differences are:
use a with statement so the file is closed whether the upload succeeds or an exception is raised
assign the result of the open() call to a name (f)
add the missing parentheses to ftp.close() so the method is actually called
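Since the original goal was to remove each local file once it has been uploaded, here is a small sketch of that step; it reuses Host, Origin_dir, dest_dir, files and the ftp connection from the question, and relies on the with block having closed the local file so Windows allows the deletion:
import os

print "uploading files to %s" % Host
with open(Origin_dir + files, "rb") as f:
    ftp.storbinary('STOR ' + dest_dir + files, f, 1024)

# The handle is closed when the with block exits, so the local copy
# can now be deleted.
os.remove(Origin_dir + files)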
