Python3 gnupg file encryption issue

I have a function to list all files in a directory and encrypt them with a public key.
The issue I'm having is that if there are several files inside the directory, some of them get corrupted: 2 or 3 of them end up as GPG files of exactly 858 bytes with no content, and if I decrypt them I get 0-byte files.
Function
import os
from gnupg import GPG

def gpg_encrypt(source):
    gpg = GPG(gnupghome='/home/mohs3n/.gnupg', use_agent=True)
    try:
        if os.path.isfile(source):
            if source.endswith('.gpg'):
                print(source + ' is already encrypted')
            else:
                stream = open(source, "rb")
                status = gpg.encrypt_file(stream, 'C05819CE8A9DA638BD6B6E08688D1CE89FCE05B3', armor=False, always_trust=True, output=source+'.gpg', symmetric=False)
                stream.close()
                if status.ok:
                    os.remove(source)
                    print(source, ' successfully encrypted')
        elif os.path.isdir(source):
            for root, dirs, files in os.walk(source, topdown=True):
                for name in files:
                    current_file = (os.path.join(root, name))
                    if current_file.endswith('.gpg'):
                        print(current_file + ' : is already encrypted')
                    else:
                        stream = open(current_file, "rb")
                        status = gpg.encrypt_file(stream, 'C05819CE8A9DA638BD6B6E08688D1CE89FCE05B3', armor=False, always_trust=True, output=source+'/'+name+'.gpg', symmetric=False)
                        stream.close()
                        if status.ok:
                            os.remove(current_file)
                            print(current_file + ' successfully encrypted')
    except Exception as e:
        print(e)
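If some outputs come out empty, python-gnupg's result object usually says why in its status and stderr fields, so it is worth logging those whenever status.ok is false instead of skipping the file silently. A minimal debugging sketch, assuming the same key fingerprint as above (the helper name is illustrative, not part of the original code):

import os
from gnupg import GPG

FINGERPRINT = 'C05819CE8A9DA638BD6B6E08688D1CE89FCE05B3'

def encrypt_one(gpg, path):
    # Encrypt a single file and report gnupg's own diagnostics on failure.
    with open(path, 'rb') as stream:
        status = gpg.encrypt_file(stream, FINGERPRINT, armor=False,
                                  always_trust=True, output=path + '.gpg')
    if status.ok:
        os.remove(path)
        print(path, 'successfully encrypted')
    else:
        print(path, 'failed:', status.status)   # short reason, e.g. 'invalid recipient'
        print(status.stderr)                    # full gpg stderr output

Note also that the directory branch above writes every output to source+'/'+name+'.gpg' rather than next to the file in root, so two same-named files from different subdirectories end up writing to the same output path; writing output=path + '.gpg' as in the sketch avoids that collision.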

Related

Check list if file has downloaded and skip if it has?

I am new to Python and I'm sure the code below can be optimised; however, I have run into an issue with the last step in my script.
The aim is to not download a file if it has been previously downloaded. At the moment I log each download in a file called download_history.log.
I therefore need to implement a check along these lines: consult the log; if the file already appears there, do nothing and move on to the next file; if it does not, download the file and add it to the log.
Any help would be appreciated.
#!/usr/bin/env python3
import boto
import sys, os
import zipfile
import shutil
import glob
import re
from boto.s3.key import Key
from boto.exception import S3ResponseError
# Make the download directory
DOWNLOAD_LOCATION_PATH = os.path.expanduser("~") + "/AWSSplunk/Downloads/"
if not os.path.exists(DOWNLOAD_LOCATION_PATH):
    print("Making download directory")
    os.mkdir(DOWNLOAD_LOCATION_PATH)
# Delete the output folder if it exists
OUTPUT_FOLDER = os.path.expanduser("~") + "/AWSSplunk/Output/"
shutil.rmtree(OUTPUT_FOLDER)
# Define the AWS bucket
def backup_s3_folder():
    BUCKET_NAME = "my-bucket-name"
    AWS_ACCESS_KEY_ID = os.getenv("##################")
    AWS_ACCESS_SECRET_KEY = os.getenv("#########################")
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_ACCESS_SECRET_KEY)
    bucket = conn.get_bucket(BUCKET_NAME)
    # Go through the list of files
    bucket_list = bucket.list()
    for l in bucket_list:
        key_string = str(l.key)
        s3_path = DOWNLOAD_LOCATION_PATH + key_string
        try:
            # Add files to the log file
            print("Downloading file ", key_string)
            file_object = open('download_history.log', 'a')
            file_object.write(key_string)
            file_object.write("\n")
            # Working code
            file_object.close()
            l.get_contents_to_filename(s3_path)
        except (OSError, S3ResponseError) as e:
            pass
            # check if the file has been downloaded locally
            if not os.path.exists(s3_path):
                try:
                    os.makedirs(s3_path)
                except OSError as exc:
                    # guard against race conditions
                    import errno
                    if exc.errno != errno.EEXIST:
                        raise
if __name__ == '__main__':
    backup_s3_folder()
# Start the unzipping process
print("Unzipping Starting")
dir_path = os.path.expanduser("~") + "/AWSSplunk/Downloads/"
for path, dir_list, file_list in os.walk(dir_path):
    for file_name in file_list:
        if file_name.endswith(".zip"):
            abs_file_path = os.path.join(path, file_name)
            parent_path = os.path.split(abs_file_path)[0]
            output_folder_name = os.path.splitext(abs_file_path)[0]
            output_path = os.path.join(parent_path, output_folder_name)
            zip_obj = zipfile.ZipFile(abs_file_path, 'r')
            zip_obj.extractall(output_path)
            zip_obj.close()
print("Unzipping Completed")
# Start moving files to output
print("Moving Files")
FILE_LOCATION_PATH = os.path.expanduser("~") + "/AWSSplunk/Output/"
if not os.path.exists(FILE_LOCATION_PATH):
    print("Making output directory")
    os.mkdir(FILE_LOCATION_PATH)
# Move .log files
for root, dirs, files in os.walk(dir_path):
    for file in files:
        if file.endswith('.log'):
            count = 1
            destination_file = os.path.join(FILE_LOCATION_PATH, file)
            while os.path.exists(destination_file):
                destination_file = os.path.join(FILE_LOCATION_PATH, f"{file}_{count}")
                count += 1
            shutil.move(os.path.join(root, file), destination_file)
# Move .txt files
for root, dirs, files in os.walk(dir_path):
    for file in files:
        if file.endswith('.txt'):
            count = 1
            destination_file = os.path.join(FILE_LOCATION_PATH, file)
            while os.path.exists(destination_file):
                destination_file = os.path.join(FILE_LOCATION_PATH, f"{file}_{count}")
                count += 1
            shutil.move(os.path.join(root, file), destination_file)
# Move .json files
for root, dirs, files in os.walk(dir_path):
    for file in files:
        if file.endswith('.json'):
            count = 1
            destination_file = os.path.join(FILE_LOCATION_PATH, file)
            while os.path.exists(destination_file):
                destination_file = os.path.join(FILE_LOCATION_PATH, f"{file}_{count}")
                count += 1
            shutil.move(os.path.join(root, file), destination_file)
print("Files Move Complete")
# Delete directory
print("Cleaning up Downloads Directory")
shutil.rmtree(DOWNLOAD_LOCATION_PATH)
# Remove EFR audit logs starting with 2020
print("Remove the encrypted Audit Logs")
pattern = "^(2020)"
FILE_LOCATION_PATH = os.path.expanduser("~") + "/AWSSplunk/Output/"
for root, dirs, files in os.walk(FILE_LOCATION_PATH):
    for file in filter(lambda x: re.match(pattern, x), files):
        os.remove(os.path.join(root, file))
# Remove EFR audit logs starting with EFR
pattern = "^(EFR)"
FILE_LOCATION_PATH = os.path.expanduser("~") + "/AWSSplunk/Output/"
for root, dirs, files in os.walk(FILE_LOCATION_PATH):
    for file in filter(lambda x: re.match(pattern, x), files):
        os.remove(os.path.join(root, file))
# Remove EFR audit logs starting with 2019
pattern = "^(2019)"
FILE_LOCATION_PATH = os.path.expanduser("~") + "/AWSSplunk/Output/"
for root, dirs, files in os.walk(FILE_LOCATION_PATH):
    for file in filter(lambda x: re.match(pattern, x), files):
        os.remove(os.path.join(root, file))
# Script clean up
print("Script Complete")
#with open("download_history.log", "a") as myfile:
#    myfile.write('New Line\n')
With os you can check whether a file exists or not:
if not os.path.isfile(PATH_TO_EXPECTED_DOWNLOADED_FILE):
    # do the download
For your own sanity, please separate your steps into functions and build a pipeline out of them.
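To address the actual question (skipping keys already recorded in download_history.log), here is a minimal sketch that could wrap the download loop; the helper names are illustrative, not part of the original script:

import os

def load_history(log_path='download_history.log'):
    # Read previously downloaded keys into a set (empty if there is no log yet)
    if not os.path.exists(log_path):
        return set()
    with open(log_path) as log:
        return {line.strip() for line in log}

def record_download(key_string, log_path='download_history.log'):
    # Append a key only after its download succeeded
    with open(log_path, 'a') as log:
        log.write(key_string + '\n')

Inside the bucket loop it would be used roughly like this:

history = load_history()
for l in bucket_list:
    key_string = str(l.key)
    if key_string in history:
        continue  # already downloaded, move on to the next file
    l.get_contents_to_filename(DOWNLOAD_LOCATION_PATH + key_string)
    record_download(key_string)
    history.add(key_string)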

Not able to download files from FTP

I am trying to download files from my FTP server with a Python script, but the files I get are 0 KB in size, and I can't work out exactly where I'm going wrong. I am searching for files whose names contain a particular string and then downloading all matching files from a given directory on the FTP server.
Here is my code:
# Libraries
import re
import os
import ftplib
import ntpath
ftp = ftplib.FTP("192.168.1.786:22")
ftp.login("Marshmellow", "YourPasswordHere")
##ftp.dir("feed_1")
files = []
## F = open('Files.txt','a')
try:
    files = ftp.nlst("feed_1")
    for fname in files:
        res = re.findall("2018-07-25", fname)
        if res:
            # Open the file for writing in binary mode
            print 'Opening local file ' + ntpath.basename(fname)
            file = open(ntpath.basename(fname), 'wb')
            # Download the file a chunk at a time
            # Each chunk is sent to handleDownload
            # We append the chunk to the file and then print a '.' for progress
            # RETR is an FTP command
            print 'Getting ' + ntpath.basename(fname)
            try:
                ftp.retrbinary('RETR ' + ntpath.basename(fname), file.write)
            except:
                pass
            # Clean up time
            print 'Closing file ' + ntpath.basename(fname)
            file.close()
            print (fname)
            ## F.write(fname + '\n')
        if not res:
            continue
except ftplib.error_perm, resp:
    if str(resp) == "550 No files found":
        print "No files in this directory"
        pass
    else:
        raise
## F.close()
Can anyone help me out with what's wrong here?
try:
    ftp.cwd("feed_1")
    files = ftp.nlst()
    for fname in files:
        res = re.findall("2018-07-25", fname)
        if res:
            # Open the file for writing in binary mode
            print 'Opening local file ' + ntpath.basename(fname)
            file = open(ntpath.basename(fname), 'wb')
I've just set the current working directory using ftp.cwd("feed_1"), which I had previously been doing the wrong way as files = ftp.nlst("feed_1"). With the old call, nlst returned paths like feed_1/<name>, but RETR was then issued with just the basename from the server root, so the transfers failed and the bare except: pass hid the error, leaving the 0 KB local files.
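For reference, a Python 3 sketch of the same download loop with the fix applied; it assumes the same server layout and date filter as the question, and deliberately lets transfer errors propagate, since swallowing them is what turned failed transfers into silent 0 KB files:

import ftplib
import os

ftp = ftplib.FTP("192.168.1.786")   # host only; ftplib defaults to port 21
ftp.login("Marshmellow", "YourPasswordHere")
ftp.cwd("feed_1")                   # work inside the target directory
for fname in ftp.nlst():
    if "2018-07-25" not in fname:
        continue
    local_name = os.path.basename(fname)
    with open(local_name, 'wb') as f:
        ftp.retrbinary('RETR ' + fname, f.write)
    print('Downloaded', local_name)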

Ignore (skip) corrupt files when logging metadata and proceed with the rest

A metadata-logging script gets stuck on corrupt files and gives the error shown below.
How can I skip (ignore) the corrupt files and continue with the rest of the program?
Code (lines 68-87 and 198-204):
#lines 68-87:
def createBasicInfoListFromDisk():
    global diskCompareListDetails, onlyFileNameOnDisk, driveLetter, walk_dir
    walk_dir = os.path.abspath(walk_dir)
    for root, subdirs, files in os.walk(walk_dir, topdown=True, onerror=None, followlinks=True):
        for filename in files:
            file_path = os.path.join(root, filename)
            temp = file_path.split(":")
            driveLetter = temp[0]
            filePathWithoutDriveLetter = temp[1]
            fileSize = os.path.getsize(file_path)
            mod_on = get_last_write_time(file_path)
            print('\t- file %s (full path: %s)' % (filename, file_path))
            print('FileName : {filename} is of size {size} and was modified on{mdt}'.format(filename=file_path, size=fileSize, mdt=mod_on))
            diskCompareListDetails.append("\"" + filePathWithoutDriveLetter + "\",\"" + str(fileSize) + "\",\"" + mod_on + '"')
            onlyFileNameOnDisk.append("\"" + filePathWithoutDriveLetter + "\"")
    return
#lines 198-204:
foundFile = 0
foundFile = findAndReadCSVFile(csvfilewithPath)
createBasicInfoListFromDisk()
compareLogAndDiskLists()
displayInfoForUserInput()
processFiles(foundFile)
writeCSVFile(csvfilewithPath)
Error:
FileName : T:\STBY\file1.txt is of size 1241 and was modified on2006-03-15 20:35:00
Traceback (most recent call last):
File "c:\scripts\test.py", line 200, in <module>
createBasicInfoListFromDisk()
File "c:\scripts\test.py", line 79, in createBasicInfoListFromDisk
fileSize = os.path.getsize(file_path)
File "C:\Python\Python36\lib\genericpath.py", line 50, in getsize
return os.stat(filename).st_size
FileNotFoundError: [WinError 3] The system cannot find the path specified: 'T:\\STBY\\file1.txt'
Modification: no error message anymore, but 50% of the files are skipped, files that a PowerShell script recognizes as fine:
def createBasicInfoListFromDisk():
    try:
        global diskCompareListDetails, onlyFileNameOnDisk, driveLetter, walk_dir
        walk_dir = os.path.abspath(walk_dir)
        for root, subdirs, files in os.walk(walk_dir, topdown=True, onerror=None, followlinks=True):
            for filename in files:
                file_path = os.path.join(root, filename)
                temp = file_path.split(":")
                driveLetter = temp[0]
                filePathWithoutDriveLetter = temp[1]
                fileSize = os.path.getsize(file_path)
                mod_on = get_last_write_time(file_path)
                print('\t- file %s (full path: %s)' % (filename, file_path))
                print('FileName : {filename} is of size {size} and was modified on{mdt}'.format(filename=file_path, size=fileSize, mdt=mod_on))
                diskCompareListDetails.append("\"" + filePathWithoutDriveLetter + "\",\"" + str(fileSize) + "\",\"" + mod_on + '"')
                onlyFileNameOnDisk.append("\"" + filePathWithoutDriveLetter + "\"")
    except OSError:
        pass
    return "ERROR"
One way is to use try-except:
try:
    fileSize = os.path.getsize(file_path)
except OSError as e:
    fileSize = -1
    print('error thrown when handling {0}'.format(file_path))
Alternatively, you can check whether the file exists before invoking getsize():
fileSize = -1 if not os.path.exists(file_path) else os.path.getsize(file_path)
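Note that the modification above wraps the entire os.walk loop in a single try, so the first OSError silently abandons every remaining file, which would explain half the files going missing. A sketch of the per-file variant, keeping the question's names, where only the offending file is skipped:

def createBasicInfoListFromDisk():
    global diskCompareListDetails, onlyFileNameOnDisk, driveLetter, walk_dir
    walk_dir = os.path.abspath(walk_dir)
    for root, subdirs, files in os.walk(walk_dir, topdown=True, followlinks=True):
        for filename in files:
            file_path = os.path.join(root, filename)
            try:
                # Only this file is skipped if its metadata cannot be read
                fileSize = os.path.getsize(file_path)
                mod_on = get_last_write_time(file_path)
            except OSError as e:
                print('skipping {0}: {1}'.format(file_path, e))
                continue
            driveLetter, filePathWithoutDriveLetter = file_path.split(":", 1)
            diskCompareListDetails.append('"%s","%s","%s"' % (filePathWithoutDriveLetter, fileSize, mod_on))
            onlyFileNameOnDisk.append('"%s"' % filePathWithoutDriveLetter)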

How to encrypt multiple files using python

I am trying to search for .txt files in a specified folder and encrypt each one found using my encryption algorithms. However, I cannot figure out how to encrypt all the .txt files found within the folder and rename them.
This is the code I am currently working with:
import time, os, sys, encrypt, decrypt, caesarCipher, reverseCipher, vigenereCipher, glob

def main():
    outputFilename = 'ABC.encrypted.txt'
    mKey = 5
    myMode = 'encrypt'
    for root, dirs, files in os.walk('/Ransom'):
        for file in files:
            if file.endswith((".txt")):
                inputFilename = os.path.join(root, file)
                if not os.path.exists(inputFilename):
                    print('The file %s does not exist. Exiting....' % (inputFilename))
                    sys.exit()
                fileObj = open(inputFilename)
                content = fileObj.read()
                fileObj.close()
                print('%sing...' % (myMode.title()))
                startTime = time.time()
                if myMode == 'encrypt':
                    translated = encrypt.encryptMess(mKey, content, myMode)
                elif myMode == 'decrypt':
                    translated = decrypt.decryptMess(mKey, content, myMode)
                outputFileObj = open(outputFilename, 'w')
                outputFileObj.write(translated)
                outputFileObj.close()
                print('Done %sing %s (%s characters).' % (myMode, inputFilename, len(content)))
                print('%sed file is %s.' % (myMode.title(), outputFilename))

if __name__ == '__main__':
    main()
I would really appreciate any help in achieving this.
This code iterates over all the files in a given folder and calls a designated method whenever a file matches '*.txt':
import os
baseUrl = './'

def encryptFile(filename):
    # process one file here
    print baseUrl + filename

alist = next(os.walk(baseUrl))[2]
for i in xrange(len(alist)):
    afile = alist[i]
    if afile[-4:] == '.txt':
        encryptFile(afile)
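Separately, the question's loop writes every result to the single name ABC.encrypted.txt, so each encrypted file overwrites the previous one. A minimal sketch that derives a per-file output name instead, assuming encrypt.encryptMess works as in the question:

import os, encrypt

mKey = 5
myMode = 'encrypt'
for root, dirs, files in os.walk('/Ransom'):
    for file in files:
        if file.endswith('.txt'):
            inputFilename = os.path.join(root, file)
            with open(inputFilename) as fileObj:
                content = fileObj.read()
            translated = encrypt.encryptMess(mKey, content, myMode)
            # Derive a unique output name from the input name
            outputFilename = inputFilename + '.encrypted'
            with open(outputFilename, 'w') as outputFileObj:
                outputFileObj.write(translated)
            print('Encrypted %s -> %s' % (inputFilename, outputFilename))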

python unzip files below the root folder

I would like to unzip all the folders and files of an archive below the root folder. I have an archive named abc.zip which yields entries such as abc/xyz/, abc/123.jpg and abc/xyz1/; I just want to extract xyz/, 123.jpg and xyz1/ into the CWD.
I use the code below to extract a file, but I need help with how to omit the root folder from the list.
def unzip_artifact( local_directory, file_path ):
    fileName, ext = os.path.splitext( file_path )
    if ext == ".zip":
        Downloadfile = basename(fileName) + ext
        print 'unzipping file ' + Downloadfile
        try:
            zipfile.ZipFile(file_path).extractall(local_directory)
        except zipfile.error, e:
            print "Bad zipfile: %s" % (e)
    return
You have to use a more complex (and therefore more customizable) way to unzip. Instead of using the extractall method, you extract each member individually, which lets you change the destination path and omit the archive's root directory.
Here is your code with the modification you needed:
def unzip_artifact( local_directory, file_path ):
    fileName, ext = os.path.splitext( file_path )
    if ext == ".zip":
        Downloadfile = fileName + ext
        print 'unzipping file ' + Downloadfile
        try:
            #zipfile.ZipFile(file_path).extractall(local_directory) # Old way
            # Open the zip
            with zipfile.ZipFile(file_path) as zf:
                # For each member of the archive
                for member in zf.infolist():
                    # If it's a directory, continue
                    if member.filename[-1] == '/': continue
                    # Else write its content to the root (binary mode, since zf.read returns bytes)
                    with open(local_directory + '/' + os.path.basename(member.filename), "wb") as outfile:
                        outfile.write(zf.read(member))
        except zipfile.error, e:
            print "Bad zipfile: %s" % (e)
    return
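One caveat: os.path.basename flattens every subdirectory into the target folder, while the question wanted xyz/ and xyz1/ kept intact below the CWD. A Python 3 sketch that strips only the leading root component (abc/) and preserves the rest of the tree; the single-root-folder layout is the question's assumption:

import os
import zipfile

def unzip_below_root(local_directory, file_path):
    with zipfile.ZipFile(file_path) as zf:
        for member in zf.infolist():
            if member.filename.endswith('/'):
                continue  # directory entries are recreated as needed below
            # Drop the first path component, e.g. 'abc/xyz/1.jpg' -> 'xyz/1.jpg'
            relative = member.filename.split('/', 1)[-1]
            target = os.path.join(local_directory, relative)
            os.makedirs(os.path.dirname(target) or '.', exist_ok=True)
            with open(target, 'wb') as outfile:
                outfile.write(zf.read(member))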
