SOLVED:
This issue has been solved by renaming one of my two loggers. My issue arose because I called log.getLogger again in the main file, which caused the same logger to be configured with a second handler. The solution is to remove the second call, OR rename one of the two loggers.
I am attempting to set up a custom logger for my current project, and I am having difficulty getting it to work properly outside of the __init__.py file. The issue is that anything I log is logged twice.
My Code:
__init__.py:
import datetime as date
import os
import platform as plt
import logging as log
prefsDirectory = 'prefs/'
prefsName = 'preferences.txt'
prefsLocation = prefsDirectory + prefsName
now = date.datetime.now()
# SETUP
if not(os.path.exists(prefsLocation)):
    if not(plt.system() == "Darwin"):
        os.mknod(prefsLocation)
        with(open(prefsLocation, 'w+')) as f:
            f.write('Log increment:\n' + str(1) + "\n")
            f.close()
            pass
    else:
        if not(os.path.exists(prefsDirectory)):
            os.mkdir(prefsDirectory)
        with(open(prefsLocation, 'w+')) as f:
            f.close()
            pass
        with(open(prefsLocation, 'w+')) as f:
            f.write('Log increment:\n' + str(0) + "\n")
            f.write('\nCurrent Date:\n' + str(now.day) + "\n")
            f.close()
            pass

with(open(prefsLocation, "r")) as f:
    data = f.readlines()

if not(str(now.day) == data[4]):
    data[4] = str(now.day)
    data[1] = str(0) + '\n'
    # print('This ran')
else:
    inc = str(int(data[1]) + 1)
    data[1] = inc + "\n"

with(open(prefsLocation, "w")) as f:
    lines = (str(item) for item in data)
    for item in lines:
        f.write(item)
dateC = "[" + str(now.year) + "-" + str(now.month) + "-" + data[4] + "]"
logDirectory = "logs/"
inc = int(data[1])
logName2 = str(dateC) + "-" + str(inc)
logName = logName2 + ".log"
logLocation = logDirectory + logName
if not(os.path.exists(logLocation)):
    if not(plt.system() == "Darwin"):
        os.mknod(logLocation)
    else:
        if not(os.path.isdir(logDirectory)):
            os.mkdir(logDirectory)
        with (open(logLocation, 'w+')) as f:
            f.close()
            pass
formatter = log.Formatter("[%(asctime)s][%(levelname)s][%(module)s] : %(message)s \n", "%H:%M-%S" + "s")
handler = log.StreamHandler()
handler.setFormatter(formatter)
handler.setLevel("DEBUG")
logger = log.getLogger("Main")
logger.addHandler(handler)
log.basicConfig(filename=logLocation, level=log.DEBUG, filemode="w",
                format="[%(asctime)s][%(levelname)s][%(module)s] : %(message)s \n", datefmt="%H:%M-%S" + "s")
logger.info("[LOG NUMBER: " + str(inc) + "]")
logger.info("Found Settings file")
logger.info("Generated Log File")
__main__.py:
# IMPORTS
import logging as log
from main import variables as vrs
# VARIABLES
logg = vrs.logg
logg.addHandler(vrs.handlerMain)
log.basicConfig(filename=vrs.logLocation, level=log.DEBUG, filemode="w",
                format="[%(asctime)s][%(levelname)s][%(module)s] : %(message)s \n", datefmt="%H:%M-%S" + "s")
with(open(vrs.prefsLocation, "r")) as f:
data = f.readlines()
# BODY
logg.info('Program Loading Completed.')
# Make a data holding file.
vrs.makefile('prefs/data.txt', 'prefs/', "Data File")
variables.py:
import datetime as date
import logging as log
import os
import platform as plt
prefsDirectory = 'prefs/'
prefsName = 'preferences.txt'
prefsLocation = prefsDirectory + prefsName
with(open(prefsLocation, "r")) as f:
data = f.readlines()
now = date.datetime.now()
dateC = "[" + str(now.year) + "-" + str(now.month) + "-" + data[4] + "]"
logDirectory = "logs/"
inc = int(data[1])
logName2 = str(dateC) + "-" + str(inc)
logName = logName2 + ".log"
logLocation = logDirectory + logName
formatter = log.Formatter("[%(asctime)s][%(levelname)s][%(module)s] : %(message)s \n", "%H:%M-%S" + "s")
handler = log.StreamHandler()
handler.setFormatter(formatter)
handler.setLevel("DEBUG")
handler.set_name('Main')
handlerMain = log.StreamHandler()
handlerMain.setFormatter(formatter)
handlerMain.setLevel("DEBUG")
logg = log.getLogger("Main")
def makefile(filelocation, filedirectory, filename):
    if not (os.path.exists(filelocation)):
        if not (plt.system() == "Darwin"):
            os.mknod(filelocation)
            with(open(filelocation, 'w+')) as file:
                file.write('File Created:\n' + dateC + "\n")
                file.close()
                pass
        else:
            if not (os.path.exists(filedirectory)):
                os.mkdir(filedirectory)
            with(open(filelocation, 'w+')) as file:
                file.write('File Created:\n' + dateC + "\n")
                file.close()
                pass
    logg.info('Created file: ' + filename)
I am not sure what exactly causes the issue... I think it is something to do with defining a logger in the __init__ file and a second one in the variables file.
If it helps, I will provide a copy of my file structure below:
<img src="https://i.gyazo.com/5cb1221a65a9ad50adf2a355f92f90e4.png" alt="Image from Gyazo" width="315"/>
<img src="https://i.gyazo.com/39f1b61ca09ed364080254a0f678db80.png" alt="Image from Gyazo" width="1280"/>
[I do not seem to be able to embed Gyazo images in the post; can one of you community moderators add them for me? ALSO, the folder to look at is the one called AoC2018.]
As jobevers alluded to in his comment, you are attaching two stream handlers to the 'Main' logger: first in __init__.py and again in __main__.py. This also explains why your logging from within __init__.py works correctly, as __main__.py hasn't attached the second handler yet.
I suspect the reason you did not expect this behaviour is that you were expecting the loggers to be distinct. But the loggers you defined in __init__.py and in variables.py are actually the same. When you retrieve a logger using logging.getLogger(logger_name) with the same logger_name, it returns the same logger. So when you call logging.getLogger('Main') in variables.py, it still has the StreamHandler on it from when it was added in __init__.py.
Depending on what behaviour you want, you should either give them distinct names or remove the second addHandler call.
It should be noted that logger names follow a hierarchy. Log configuration for my_package will also configure logging for anything beneath it: my_package.model, my_package.views, etc. The root logger is simply the logger with the empty name (logging.getLogger("")).
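To illustrate, here is a minimal sketch (the names are only examples) of how getLogger reuses the same object and how a duplicate handler doubles the output:

import logging

a = logging.getLogger("Main")
b = logging.getLogger("Main")
print(a is b)  # True: the same name always returns the same logger object

a.addHandler(logging.StreamHandler())
b.addHandler(logging.StreamHandler())  # second handler lands on the same logger
a.warning("hello")  # emitted twice, once per attached handler

child = logging.getLogger("Main.sub")  # part of the "Main" hierarchy
child.warning("hi")  # propagates up to "Main" and also comes out twice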
For more details I'd recommend just going through the official docs.
Related
I'm trying to make an HTTP server from scratch and want to write the log to a text file, so I created this function:
def do_LOG(self, addr, request):
    path = 'log/logging.txt'
    host = addr[0]
    port = addr[1]
    method = request[:4]
    headers = request.split('\n')
    filename = headers[0].split()[1]
    f = open(path, "a+")
    f.writelines('Server used: ' + host + '\n' + 'Port used: ' + port + '\n' + 'Method Served: ' + method + '\n' + 'Filename: ' + filename + '\n\n')
    f.close()
    return
This function only creates a file but is not able to write to the file. I'm overriding this function from the parent class. This is the definition in the parent class:
def do_LOG(self, addr, request):
    return
Please provide some more code.
For better handling, add the relative path of the directory by adding './' at the start.
Make sure the 'log' directory exists in the project's parent directory (a small sketch for this follows the code below).
Still, I'm providing a temporary fix:
class Logs:
    def do_LOG(self, addr, request):
        path = './log/logging.txt'
        host = addr[0]
        port = addr[1]
        method = request[:4]
        headers = request.split('\n')
        filename = headers[0].split()[1]
        f = open(path, "a+")
        f.writelines('Server used: ' + host + '\n' + 'Port used: ' + port + '\n' + 'Method Served: ' + method + '\n' + 'Filename: ' + filename + '\n\n')
        f.close()
        return

Logs().do_LOG("<addr>", "<request>")
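If the 'log' directory might not exist yet, a small guard can create it before opening the file (a sketch, not part of the original answer; the path is just an example):

import os

os.makedirs('./log', exist_ok=True)  # create the folder only if it is missing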
Hope this helps!
First, be careful to use proper indentation (I suspect this is from copying your code).
Second, you chose the mode 'a+', which I'm not familiar with. In order to write to a file, you should use the 'w' mode; I also recommend providing the encoding:
f = open(path, "w", encoding= "utf-8")
f.write('Server used: ' + host + '\nPort used: ' + port + '\n'+'Method Served: ' + method + '\n'+'Filename: ' + filename + '\n\n')
f.close()
If this does not work, maybe there is a problem with the values of host, port, or method; you should then try to write the file with multiple calls to see where the problem occurs:
f.write('Server used')
f.write(host)
...
I am running the following block of code to create the path to a new file:
# Opens/create the file that will be created
device_name = target_device["host"].split('.')
path = "/home/user/test_scripts/configs/" + device_name[-1] + "/"
print(path)
# Check if path exists
if not os.path.exists(path):
    os.makedirs(path)
# file = open(time_now + "_" + target_device["host"] + "_config.txt", "w")
file = open(path + time_now + "_" + device_name[0] + "_config.txt", "w")
# Time Stamp File
file.write('\n Create on ' + now.strftime("%Y-%m-%d") +
           ' at ' + now.strftime("%H:%M:%S") + ' GMT\n')
# Writes output to file
file.write(output)
# Close file
file.close()
The code runs as intended except that it creates and saves the files in the directory /home/user/test_scripts/configs/ instead of the intended one, which should be /home/user/test_scripts/configs/device_name[-1]/.
Please advise.
Regards,
./daq
Try using os.path.join(base_path, new_path) [Reference] instead of string concatenation. For example:
path = os.path.join("/home/user/test_scripts/configs/", device_name[-1])
os.makedirs(path, exist_ok=True)
new_name = time_now + "_" + device_name[0] + "_config.txt"
with open(os.path.join(path, new_name), "w+") as file:
    file.write("something")
Although I don't get why you're creating a directory with device_name[-1] but using device_name[0] in the file name.
I'm writing a Python script that uses watchdog to check whether a file has been added to a folder; that file is then going to be added to a queue.
My idea is to add the filename to a txt file, then have another class watch that txt file, execute a command line, and start, for example, FME.
Is it best to write a new .py for every new program I want to open, for example one for FME and one for Notepad?
I still want the watchdog class to run in the background.
looking_for_files_and_adding_to_queue.py
looking_in_queue_for_the_next_in_line_and_direct_to_3_party.py
FME.py
Notepad.py
and so on...
Or all in one .py:
class looking_for_files_and_adding_to_queue
class looking_in_queue_for_the_next_in_line_and_direct_to_3_party
class FME
class Notepad
Today my script looks like this:
import time
import sys
import os
import datetime
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler


class MyHandler(PatternMatchingEventHandler):
    patterns = ["*.tif"]
    count_move = 0

    def process(self, event):
        if self.count_move == 1:
            # the file will be processed there
            folder = "P:\\03_auto\\Indata"
            indata = event.src_path
            # Makes a new folder in Utdata based on filename
            newfolder = os.path.join(folder[:11], str("Utdata\\orto"), event.src_path[18:29])
            if not os.path.exists(newfolder):
                os.makedirs(newfolder)
            # Log and print start of FME
            print(time.strftime('%a %H:%M:%S') + ": FME " + event.src_path[18:] + " startats i FME.")
            log_file = open("P:\\03_auto\\log.txt", "a")
            log_file.write(time.strftime('%a %H:%M:%S') + ": FME " + event.src_path[18:] + " startats i FME.\n")
            log_file.close()
            # Starting and executing FME
            var_fme = str('fme.exe "P:\\03_auto\\Script\\tiff_to_milti_jpg_tiff\\tif_to_multi-jpg-tiff.fmw" --SourceDataset_TIFF "') + indata + str('" --FEATURE_TYPES "" --DestDataset_JPEG "') + newfolder + str('" --DestDataset_JPEG_5 "') + newfolder + str('" --DestDataset_JPEG_4 "') + newfolder + str('" --DestDataset_GEOTIFF "') + newfolder + str('" --DestDataset_GEOTIFF_3 "') + newfolder + str('"')
            os.system(var_fme)
            # Log and print move of file
            print(time.strftime('%a %H:%M:%S') + ": Flytt " + event.src_path[18:] + " har flyttats till" + newfolder + "\nTransformering klar\n")
            log_file = open("P:\\03_auto\\log.txt", "a")
            log_file.write(time.strftime('%a %H:%M:%S') + ": Flytt " + event.src_path[18:] + " har flyttats till" + newfolder + "\nTransformering klar\n\n")
            log_file.close()
            # Move original file to Utdata\orto
            file_move = newfolder + indata[17:]
            os.rename(indata, file_move)
            # Resets script
            self.count_move = 0
        else:
            # Log and print loading of file while transferring
            print(time.strftime('%a %H:%M:%S') + ": Laddar " + event.src_path[18:] + " startar inladdning.")
            log_file = open("P:\\03_auto\\log.txt", "a")
            log_file.write(time.strftime('%a %H:%M:%S') + ": Laddar " + event.src_path[18:] + " startar inladdning.\n")
            log_file.close()
            # Sets counter to 1 which enables the FME part
            self.count_move += 1

    def on_modified(self, event):
        self.process(event)


if __name__ == '__main__':
    path = "P:\\03_auto\\Indata"
    observer = Observer()
    observer.schedule(MyHandler(), path, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
tl;dr keep everything in one file for now, split subsequently while refactoring when the file becomes huge.
Python does not force you to split classes / functions into modules. We as programmers make that call solely for readability and maintainability.
While refactoring, I personally look at splitting functions with more than ~40-50 lines and files with ~1000 lines, and try to keep closely related things together.
High cohesion and low coupling is a characteristic feature of good software.
Also, since you seem to be starting out with this project, I would recommend you first concentrate on making a version that works, and thereafter refactor it to improve code quality.
Premature optimization is the root of all evil.
I am assuming that you are looking for suggestions to improve code quality here, so here are a few things you might also be interested in:
follow pep8 standards: https://pep8.org
make your functions / methods accept parameters instead of hardcoding them, e.g. the path of the folder you are watching (see the sketch after this list).
make your program capable of resuming operations even after erroneous / abrupt termination: eg store state with a file or database
instead of trying to implement a queue yourself use robust systems like rabbitmq or redis.
write functions / methods that perform only one operation and do it well.
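As an illustration of the parameter point, a minimal sketch (the handler and paths are only examples, not your actual code):

import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler

class TifHandler(PatternMatchingEventHandler):
    patterns = ["*.tif"]

def watch_folder(path, handler_cls=TifHandler, interval=1):
    # the folder to watch and the handler are passed in instead of hardcoded
    observer = Observer()
    observer.schedule(handler_cls(), path, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(interval)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()

if __name__ == '__main__':
    watch_folder("P:\\03_auto\\Indata")  # path supplied by the caller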
This is how far I have gotten. Now I have to get the files from the queue to FME.
import time
import sys
import os
import datetime
import arrow
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from shutil import copy


class Queue:
    def __init__(self):
        self.items = []

    def isEmpty(self):
        return self.items == []

    def enqueue(self, item):
        self.items.insert(0, item)

    def dequeue(self):
        return self.items.pop()

    def size(self):
        return len(self.items)

    def printqueue(self):
        i = 0
        for items in self.items:
            i += 1
            print(str(i) + ": " + items)


class MyHandler(PatternMatchingEventHandler):
    patterns = ["*.tif", "*.pdf"]
    q = Queue()

    def on_created(self, event):
        file_name = os.path.basename(event.src_path)
        file_type = file_name.split(".")[-1]
        file_path = "path"
        file_name_path = event.src_path
        endwith = file_name.endswith("_mosaic_group1.tif")
        new_folder = "C:\\FME_workdir\\"
        new_path = new_folder + file_name
        # create new temp folder for FME
        if not os.path.exists(new_folder):
            os.makedirs(new_folder)
        # get tif file from project
        if file_name.endswith("_mosaic_group1.tif") and not os.path.exists(new_path):
            print("Queue:")
            self.q.enqueue("[" + file_name + ", " + file_name_path + ", " + new_path + ", " + file_type + "]")
            self.q.printqueue()
            print("\n")
            # fme = Fme()
            # return fme.runfme(file_name, file_path, file_name_path)
            # copy file to FME folder
            if not os.path.exists(new_path):
                copy(file_name_path, new_path)
        # get the PDF report
        elif file_name.endswith("_report.pdf") and "1_initial" in file_name_path:
            pdf_path = os.path.dirname(file_name_path)
            pdf_path_new_path = "\\".join(pdf_path.split("\\")[:3])
            pdf_path_new_dir = "\\".join(pdf_path.split("\\")[5:6])
            date_now = str(time.strftime("%y%m%d"))
            pdf_new_path = pdf_path_new_path + "\\03_leverans\\" + pdf_path_new_dir + "_" + date_now
            pdf_new_path_filename = pdf_new_path + "\\" + file_name
            if not os.path.exists(pdf_new_path):
                os.makedirs(pdf_new_path)
            copy(file_name_path, pdf_new_path_filename)
            # put into queue system
            self.q.enqueue("[" + file_name + ", " + file_name_path + ", " + pdf_new_path + ", " + file_type + "]")
            self.q.printqueue()


class Fme:
    def runfme(self, file_name, file_path, file_name_path):
        print("FME: " + file_name)


if __name__ == '__main__':
    path = "P:\\"
    observer = Observer()
    observer.schedule(MyHandler(), path, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
        print("stop")
    observer.join()
    input('Press ENTER to exit')
Does anyone know how to read an event log file in C:\Windows\System32\winevt\Logs with the .evtx extension?
I have already tried to open it using Notepad and read it using Python, but Notepad says access is denied...
Does anyone know how to do it? Thanks in advance.
This is how you would read the file "Forwarded Events" from the Event Viewer. You need admin access, so I would run it as admin, but it will prompt you for a password if you don't.
import win32evtlog
import xml.etree.ElementTree as ET
import ctypes
import sys


def is_admin():
    try:
        return ctypes.windll.shell32.IsUserAnAdmin()
    except:
        return False


if is_admin():
    # open event file
    query_handle = win32evtlog.EvtQuery(
        'C:\\Windows\\System32\\winevt\\Logs\\ForwardedEvents.evtx',
        win32evtlog.EvtQueryFilePath)

    read_count = 0
    a = 1
    while a == 1:
        a += 1
        # read 1 record(s)
        events = win32evtlog.EvtNext(query_handle, 1)
        read_count += len(events)
        # if there is no record break the loop
        if len(events) == 0:
            break
        for event in events:
            xml_content = win32evtlog.EvtRender(event, win32evtlog.EvtRenderEventXml)
            # parse xml content
            xml = ET.fromstring(xml_content)
            # xml namespace: the root element has an xmlns definition, so we have to use the namespace
            ns = '{http://schemas.microsoft.com/win/2004/08/events/event}'
            substatus = xml[1][9].text
            event_id = xml.find(f'.//{ns}EventID').text
            computer = xml.find(f'.//{ns}Computer').text
            channel = xml.find(f'.//{ns}Channel').text
            execution = xml.find(f'.//{ns}Execution')
            process_id = execution.get('ProcessID')
            thread_id = execution.get('ThreadID')
            time_created = xml.find(f'.//{ns}TimeCreated').get('SystemTime')
            # data_name = xml.findall('.//EventData')
            # substatus = data_name.get('Data')
            # print(substatus)
            event_data = f'Time: {time_created}, Computer: {computer}, Substatus: {substatus}, Event Id: {event_id}, Channel: {channel}, Process Id: {process_id}, Thread Id: {thread_id}'
            print(event_data)
            user_data = xml.find(f'.//{ns}UserData')
            # user_data may contain arbitrary data
else:
    ctypes.windll.shell32.ShellExecuteW(None, "runas", sys.executable, " ".join(sys.argv), None, 1)

input()
.evtx is the extension for Windows Event Log files. They contain data in a special binary format designed by Microsoft, so you cannot simply open one in a text editor.
There are open source tools to read .evtx files, and NXLog EE can also read them. (Disclaimer: I'm affiliated with the latter.)
I modified the accepted answer a bit as follows, so it becomes reusable:
import xml.etree.ElementTree as Et
import win32evtlog
from collections import namedtuple


class EventLogParser:
    def __init__(self, exported_log_file):
        self.exported_log_file = exported_log_file

    def get_all_events(self):
        windows_events = []
        query_handle = win32evtlog.EvtQuery(str(self.exported_log_file),
                                            win32evtlog.EvtQueryFilePath | win32evtlog.EvtQueryReverseDirection)
        while True:
            raw_event_collection = win32evtlog.EvtNext(query_handle, 1)
            if len(raw_event_collection) == 0:
                break
            for raw_event in raw_event_collection:
                windows_events.append(self.parse_raw_event(raw_event))
        return windows_events

    def parse_raw_event(self, raw_event):
        xml_content = win32evtlog.EvtRender(raw_event, win32evtlog.EvtRenderEventXml)
        root = Et.fromstring(xml_content)
        ns = "{" + root.tag.split('}')[0].strip('{') + "}"
        system = root.find(f'{ns}System')
        event_id = system.find(f'{ns}EventID').text
        level = system.find(f'{ns}Level').text
        time_created = system.find(f'{ns}TimeCreated').get('SystemTime')
        computer = system.find(f'{ns}Computer').text
        WindowsEvent = namedtuple('WindowsEvent',
                                  'event_id, level, time_created, computer')
        return WindowsEvent(event_id, level, time_created, computer)
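A possible usage (the file name here is just an example of an exported log):

parser = EventLogParser("ForwardedEvents.evtx")
for event in parser.get_all_events():
    print(event.time_created, event.computer, event.event_id, event.level)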
I use the "python-evtx" library, you can install it using this command:
pip install python-evtx
In my case, I'm not interested in reading records with the "Information" level.
import os
import codecs
from datetime import datetime  # needed for datetime.strptime below
from lxml import etree
import Evtx.Evtx as evtx


def evtxFile(absolutePath, filenameWithExt, ext, _fromDate, _toDate):
    print("Reading: " + filenameWithExt)
    outText = ""
    channel = ""
    # read the windows event viewer log and convert its contents to XML
    with codecs.open(tempFilePath, "a+", "utf-8", "ignore") as tempFile:
        with evtx.Evtx(absolutePath) as log:
            for record in log.records():
                xmlLine = record.xml()
                xmlLine = xmlLine.replace(" xmlns=\"http://schemas.microsoft.com/win/2004/08/events/event\"", "")
                xmlParse = etree.XML(xmlLine)
                level = parseXMLtoString(xmlParse, ".//Level/text()")
                if not level == "0" and not level == "4":
                    providerName = parseXMLtoString(xmlParse, ".//Provider/@Name")
                    qualifiers = parseXMLtoString(xmlParse, ".//EventID/@Qualifiers")
                    timestamp = parseXMLtoString(xmlParse, ".//TimeCreated/@SystemTime")
                    eventID = parseXMLtoString(xmlParse, ".//EventID/text()")
                    task = parseXMLtoString(xmlParse, ".//Task/text()")
                    keywords = parseXMLtoString(xmlParse, ".//Keywords/text()")
                    eventRecordID = parseXMLtoString(xmlParse, ".//EventRecordID/text()")
                    channel = parseXMLtoString(xmlParse, ".//Channel/text()")
                    computer = parseXMLtoString(xmlParse, ".//Computer/text()")
                    message = parseXMLtoString(xmlParse, ".//Data/text()")
                    if level == "1":
                        level = "Critical"
                    elif level == "2":
                        level = "Error"
                    elif level == "3":
                        level = "Warning"
                    date = timestamp[0:10]
                    time = timestamp[11:19]
                    time = time.replace(".", "")
                    _date = datetime.strptime(date, "%Y-%m-%d").date()
                    if _fromDate <= _date <= _toDate:
                        message = message.replace("<string>", "")
                        message = message.replace("</string>", "")
                        message = message.replace("\r\n", " ")
                        message = message.replace("\n\r", " ")
                        message = message.replace("\n", " ")
                        message = message.replace("\r", " ")
                        outText = date + " " + time + "|" + level + "|" + message.strip() + "|" + task + "|" + computer + "|" + providerName + "|" + qualifiers + "|" + eventID + "|" + eventRecordID + "|" + keywords + "\n"
                        tempFile.writelines(outText)
    with codecs.open(tempFilePath, "r", "utf-8", "ignore") as tempFile2:
        myLinesFromDateRange = tempFile2.readlines()
    # delete the temporary file that was created
    os.remove(tempFilePath)
    if len(myLinesFromDateRange) > 0:
        createFolder("\\filtered_data_files\\")
        outFilename = "windows_" + channel.lower() + "_event_viewer_logs" + ext
        myLinesFromDateRange.sort()
        # remove duplicate records from the list
        myFinalLinesFromDateRange = list(set(myLinesFromDateRange))
        myFinalLinesFromDateRange.sort()
        with codecs.open(os.getcwd() + "\\filtered_data_files\\" + outFilename, "a+", "utf-8", "ignore") as linesFromDateRange:
            linesFromDateRange.seek(0)
            if len(linesFromDateRange.read(100)) > 0:
                linesFromDateRange.writelines("\n")
            linesFromDateRange.writelines(myFinalLinesFromDateRange)
        del myLinesFromDateRange[:]
        del myFinalLinesFromDateRange[:]
    else:
        print("No data was found within the specified date range.")
    print("Closing: " + filenameWithExt)
I hope it helps you or someone else in the future.
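Note that the snippet relies on parseXMLtoString and createFolder helpers that aren't shown. A minimal sketch of what parseXMLtoString might look like (an assumption: it runs an lxml XPath query and returns the first match as a string):

def parseXMLtoString(parsedXML, xpathQuery):
    # hypothetical helper: evaluate the XPath query and return the first hit as text
    results = parsedXML.xpath(xpathQuery)
    return str(results[0]) if results else ""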
EDIT:
The "tempFilePath" can be anything you want, for example:
tempFilePath = os.getcwd() + "\\tempFile.txt"
I collected some information first before calling the "evtxFile" function:
The "From" and the "To" dates are in the following format: YYYY-MM-DD
Converted the dates to "date" data type:
_fromDate = datetime.strptime(fromDate, "%Y-%m-%d").date()
_toDate = datetime.strptime(toDate, "%Y-%m-%d").date()
Divided the directory where the .evtx files are located into different parts:
def splitDirectory(root, file):
    absolutePathOfFile = os.path.join(root, file)
    filePathWithoutFilename = os.path.split(absolutePathOfFile)[0]
    filenameWithExt = os.path.split(absolutePathOfFile)[1]
    filenameWithoutExt = os.path.splitext(filenameWithExt)[0]
    extension = os.path.splitext(filenameWithExt)[1]
    return absolutePathOfFile, filePathWithoutFilename, filenameWithExt, filenameWithoutExt, extension

for root, subFolders, files in os.walk(directoryPath):
    for f in files:
        absolutePathOfFile, filePathWithoutFilename, filenameWithExt, \
            filenameWithoutExt, extension = splitDirectory(root, f)
        if extension == ".evtx":
            evtxFile(absolutePathOfFile, filenameWithExt, ".txt", _fromDate, _toDate)
Can someone help me?
Why is it not working?
import ftplib
import os
def readList(request):
    machine = []
    login = []
    password = []
    for line in open("netrc"):  # read netrc file
        old = line.strip()
        line = line.strip().split()
        if old.startswith("machine"): machine.append(line[-1])
        if old.startswith("login"): login.append(line[-1])
        if old.startswith("password"): password.append(line[-1])
    connectFtp(machine, login, password)

def connectFtp(machine, login, password):
    for i in range(len(machine)):
        try:
            ftp = ftplib.FTP(machine[i])
            print 'conected to ' + machine[i]
            ftp.login(login[i], password[i])
            print 'login - ' + login[i] + ' pasword -' + password[i]
        except Exception, e:
            print e
        else:
            ftp.cwd("PublicFolder")
            print 'PublicFolder'

def upload(filename, file):
    readList()
    ext = os.path.splitext(file)[1]
    if ext in (".txt", ".htm", ".html"):
        ftp.storlines("STOR " + filename, open(file))
    else:
        ftp.storbinary("STOR " + filename, open(file, "rb"), 1024)
    print 'success... yra'

upload('test4.txt', r'c:\example2\media\uploads\test4.txt')
When it was all together it was working, but when I separated it into functions something happened; I can't understand what.
(Apart from the horrid indentation problems, which are presumably due to botched copy and paste otherwise you'd get syntax errors up the wazoo...!)...:
Scoping problem, first: connectFtp makes a local variable ftp, so that variable goes away as soon as the function's done. Then upload tries using the variable, but of course it isn't there any more.
Add a return ftp at the end of connectFtp, a yield connectFtp instead of a plain call to the loop in readList, and use a for ftp in readList(): loop in upload.
Something like this?
import ftplib
import os

def readList(request):
    machine = []
    login = []
    password = []
    for line in open("netrc"):  # read netrc file
        old = line.strip()
        line = line.strip().split()
        if old.startswith("machine"): machine.append(line[-1])
        if old.startswith("login"): login.append(line[-1])
        if old.startswith("password"): password.append(line[-1])
    yield connectFtp

def connectFtp(machine, login, password):
    for i in range(len(machine)):
        try:
            ftp = ftplib.FTP(machine[i])
            print 'conected to ' + machine[i]
            ftp.login(login[i], password[i])
            print 'login - ' + login[i] + ' pasword -' + password[i]
        except Exception, e:
            print e
        else:
            ftp.cwd("PublicFolder")
            print 'PublicFolder'
    return (ftp)

def upload(filename, file):
    for ftp in readList():
        ext = os.path.splitext(file)[1]
        if ext in (".txt", ".htm", ".html"):
            ftp.storlines("STOR " + filename, open(file))
        else:
            ftp.storbinary("STOR " + filename, open(file, "rb"), 1024)
        print 'success... yra'

upload('test4.txt', r'c:\example2\media\uploads\test4.txt')
Error at line 19, something with try:
unindent does not match any outer indentation level