The following script is an extract from
https://github.com/RittmanMead/obi-metrics-agent/blob/master/obi-metrics-agent.py
The script is written in Jython and it hits the WebLogic admin console to extract metrics.
The problem is that it runs only once and does not loop.
Here's the script that I've extracted from the original for my purpose:
import calendar, time
import sys
import getopt

print '---------------------------------------'

# Check the arguments to this script are as expected.
# argv[0] is script name.
argLen = len(sys.argv)
if argLen -1 < 2:
    print "ERROR: got ", argLen -1, " args, must be at least two."
    print '$FMW_HOME/oracle_common/common/bin/wlst.sh obi-metrics-agent.py <AdminUserName> <AdminPassword> [<AdminServer_t3_url>] [<Carbon|InfluxDB>] [<target host>] [<target port>] [targetDB influx db>'
    exit()

outputFormat='CSV'
url='t3://localhost:7001'
targetHost='localhost'
targetDB='obi'
targetPort='8086'

try:
    wls_user = sys.argv[1]
    wls_pw = sys.argv[2]
    url = sys.argv[3]
    outputFormat=sys.argv[4]
    targetHost=sys.argv[5]
    targetPort=sys.argv[6]
    targetDB=sys.argv[7]
except:
    print ''

print wls_user, wls_pw,url, outputFormat,targetHost,targetPort,targetDB

now_epoch = calendar.timegm(time.gmtime())*1000

if outputFormat=='InfluxDB':
    import httplib
    influx_msgs=''

connect(wls_user,wls_pw,url)
results = displayMetricTables('Oracle_BI*','dms_cProcessInfo')

while True:
    for table in results:
        tableName = table.get('Table')
        rows = table.get('Rows')
        rowCollection = rows.values()
        iter = rowCollection.iterator()
        while iter.hasNext():
            row = iter.next()
            rowType = row.getCompositeType()
            keys = rowType.keySet()
            keyIter = keys.iterator()
            inst_name= row.get('Name').replace(' ','-')
            try:
                server= row.get('Servername').replace(' ','-').replace('/','_')
            except:
                try:
                    server= row.get('ServerName').replace(' ','-').replace('/','_')
                except:
                    server='unknown'
            try:
                host= row.get('Host').replace(' ','-')
            except:
                host=''
            while keyIter.hasNext():
                columnName = keyIter.next()
                value = row.get(columnName )
                if columnName.find('.value')>0:
                    metric_name=columnName.replace('.value','')
                    if value is not None:
                        if outputFormat=='InfluxDB':
                            influx_msg= ('%s,server=%s,host=%s,metric_group=%s,metric_instance=%s value=%s %s') % (metric_name,server,host,tableName,inst_name, value,now_epoch*1000000)
                            influx_msgs+='\n%s' % influx_msg
                            conn = httplib.HTTPConnection('%s:%s' % (targetHost,targetPort))
                            ## TODO pretty sure should be urlencoding this ...
                            a=conn.request("POST", ("/write?db=%s" % targetDB), influx_msg)
                            r=conn.getresponse()
                            if r.status != 204:
                                print 'Failed to send to InfluxDB! Error %s Reason %s' % (r.status,r.reason)
                                print influx_msg
                                #sys.exit(2)
                    else:
                        print 'Skipping None value %s,server=%s,host=%s,metric_group=%s,metric_instance=%s value=%s %s' % (metric_name,server,host,tableName,inst_name, value,now_epoch*1000000)
I've tried adding the while loop shown above, but that only stopped the code from exiting; it never actually re-ran the collection.
What I want is for the script to loop indefinitely after connecting to WebLogic, i.e. after this line:
connect(wls_user,wls_pw,url)
and perhaps sleep for 5 seconds before re-running.
Any and all help will be appreciated
Thanks
P
You can use this kind of condition for the loop:
mainLoop = 'true'
while mainLoop == 'true' :
and this for the pause between iterations:
java.lang.Thread.sleep(3 * 1000)
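Putting the two together, here is a minimal sketch of the loop (an illustration, not the full script): the connection is made once, then the timestamp and the metric tables are refreshed on every pass, otherwise the loop would keep re-sending the same snapshot. Since time is already imported, time.sleep(5) is an alternative to java.lang.Thread.sleep for the 5-second pause.

connect(wls_user, wls_pw, url)

mainLoop = 'true'
while mainLoop == 'true':
    # refresh the timestamp and re-query the metric tables on every pass
    now_epoch = calendar.timegm(time.gmtime()) * 1000
    results = displayMetricTables('Oracle_BI*', 'dms_cProcessInfo')
    for table in results:
        # ... the existing row/column handling from the script above ...
        pass
    # pause 5 seconds before the next collection
    time.sleep(5)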
I have a small Python snippet that a monitoring tool we use runs to pull information from Crestron devices.
Is there a simple way to convert this snippet to use SSH instead?
I am not familiar with Python at all, so excuse me if there is an obvious answer, but is this something a freelance Python programmer could whip up, or would it take a considerable amount of time?
Looking for any help I can get on this.
Thanks!
import time
import telnetlib

#Globals:
CACHE_DATA = {}
SNIPPET_NAME = 'Crestron: DCM Cache'
FAILED_COUNT = 0
COLLECTION_PROBLEM = False
TELNET_PORT = 23 ##Crestron only support port 23, don't use cred_port
TELNET_TIMEOUT = 2 ##default timeout in seconds, note if cred_timeout >= 2000 we will readjust later.
FAILED_ITEMS = []

self.logger.ui_debug('************** %s: Starting *******************' % (SNIPPET_NAME))

#Main:
if self.cred_details['cred_type'] == 5: ##only allow Snippet cred types...
    #set global telnet timeout... if one has not been set, set it now.
    if self.cred_details['cred_timeout'] >= 2000:
        TELNET_TIMEOUT = int(self.cred_details['cred_timeout']/1000)
    #start timer
    start_time = time.time()
    try:
        #connect to telnet
        tn = telnetlib.Telnet(self.cred_details['cred_host'], TELNET_PORT, TELNET_TIMEOUT)
        #todo: add password handling.
        tn.read_until(">", TELNET_TIMEOUT)
        for obj_oid in result_handler.oids:
            ##obj_name = result_handler[obj_oid]['name']
            try:
                #run oid as CLI call from result_handler
                tn.write(obj_oid+"\r")
                rawdata = tn.read_until(">", TELNET_TIMEOUT)
                if rawdata:
                    result_handler[obj_oid] = [(0,"Collection Ok")]
                    CACHE_DATA[obj_oid] = rawdata.strip()
                else:
                    FAILED_COUNT += 1
                    result_handler[obj_oid] = [(0,"Failed: No data found")]
                    FAILED_ITEMS.append(obj_oid)
            except:
                FAILED_ITEMS.append(obj_oid)
                result_handler[obj_oid] = [(0,'Failed: Collection: %s' % obj_oid)]
                FAILED_COUNT +=1
        #save job time for perf graph
        CACHE_DATA['dcm_run_time'] = round(time.time() - start_time,4)
        #gracefully quit the telnet session so as to not leave any defunct processes on the host device.
        tn.write("bye\r")
        tn.close()
        if FAILED_COUNT == 0:
            em7_cache_result(CACHE_DATA)
        else:
            COLLECTION_PROBLEM = True
            PROBLEM_STR = "%s: Some Requests Failed: %s" % (SNIPPET_NAME, FAILED_ITEMS)
    except:
        COLLECTION_PROBLEM = True
        PROBLEM_STR = "%s: Failed to Connect to Remote Device: %s: Port %s" % (SNIPPET_NAME, self.cred_details['cred_host'], TELNET_PORT)
else:
    COLLECTION_PROBLEM = True
    PROBLEM_STR = "%s: Wrong Credential Type Aligned to This Dynamic Application" % (SNIPPET_NAME)
You should check out paramiko.
Example for opening an SSH connection and running 'ls':
import base64
import paramiko
key = paramiko.RSAKey(data=base64.b64decode(b'AAA...'))
client = paramiko.SSHClient()
client.get_host_keys().add('ssh.example.com', 'ssh-rsa', key)
client.connect('ssh.example.com', username='strongbad', password='thecheat')
stdin, stdout, stderr = client.exec_command('ls')
for line in stdout:
    print('... ' + line.strip('\n'))
client.close()
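If the device also exposes an SSH console (newer Crestron processors generally do on port 22, but check yours), the telnet loop above maps fairly directly onto an interactive paramiko channel. This is only a rough sketch; the host, credentials, prompt handling and the example commands are placeholders, not something tested against a real Crestron unit:

import time
import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())  # or load known host keys explicitly
client.connect('crestron-host', port=22, username='admin', password='secret', timeout=2)

chan = client.invoke_shell()               # interactive session, like telnetlib.Telnet(...)
chan.settimeout(2)
time.sleep(1)
chan.recv(4096)                            # discard the banner/prompt, like tn.read_until(">")

results = {}
for cmd in ['ver', 'uptime']:              # placeholder console commands (the OIDs in the snippet)
    chan.send((cmd + '\r').encode('ascii'))
    time.sleep(1)
    results[cmd] = chan.recv(65535).decode('ascii', 'ignore').strip()

chan.send(b'bye\r')                        # quit gracefully, as the telnet version does
client.close()
print(results)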
I tried to run a simple crawler in Python:
import sys
import csv
import socket
import sqlite3
import logging
from optparse import OptionParser
from urlparse import urlparse
#pip install requests
import requests
#################################################################
# FUNCTION process_row_to_db.
# handle one row and push to the DB
#
#################################################################
def process_row_to_db(conn, data_row, comment, hostname):
    insert_stmt = "INSERT OR IGNORE INTO adstxt (SITE_DOMAIN, EXCHANGE_DOMAIN, SELLER_ACCOUNT_ID, ACCOUNT_TYPE, TAG_ID, ENTRY_COMMENT) VALUES (?, ?, ?, ?, ?, ? );"
    exchange_host = ''
    seller_account_id = ''
    account_type = ''
    tag_id = ''

    if len(data_row) >= 3:
        exchange_host = data_row[0].lower()
        seller_account_id = data_row[1].lower()
        account_type = data_row[2].lower()

    if len(data_row) == 4:
        tag_id = data_row[3].lower()

    #data validation heuristics
    data_valid = 1

    # Minimum length of a domain name is 1 character, not including extensions.
    # Domain Name Rules - Nic AG
    # www.nic.ag/rules.htm
    if(len(hostname) < 3):
        data_valid = 0

    if(len(exchange_host) < 3):
        data_valid = 0

    # could be single digit integers
    if(len(seller_account_id) < 1):
        data_valid = 0

    ## ads.txt supports 'DIRECT' and 'RESELLER'
    if(len(account_type) < 6):
        data_valid = 0

    if(data_valid > 0):
        logging.debug( "%s | %s | %s | %s | %s | %s" % (hostname, exchange_host, seller_account_id, account_type, tag_id, comment))

        # Insert a row of data using bind variables (protect against sql injection)
        c = conn.cursor()
        c.execute(insert_stmt, (hostname, exchange_host, seller_account_id, account_type, tag_id, comment))

        # Save (commit) the changes
        conn.commit()
        return 1

    return 0

# end process_row_to_db #####
#################################################################
# FUNCTION crawl_to_db.
# crawl the URLs, parse the data, validate and dump to a DB
#
#################################################################
def crawl_to_db(conn, crawl_url_queue):
    rowcnt = 0

    myheaders = {
        'User-Agent': 'AdsTxtCrawler/1.0; +https://github.com/InteractiveAdvertisingBureau/adstxtcrawler',
        'Accept': 'text/plain',
    }

    for aurl in crawl_url_queue:
        ahost = crawl_url_queue[aurl]
        logging.info(" Crawling %s : %s " % (aurl, ahost))
        r = requests.get(aurl, headers=myheaders)
        logging.info(" %d" % r.status_code)

        if(r.status_code == 200):
            logging.debug("-------------")
            logging.debug(r.request.headers)
            logging.debug("-------------")
            logging.debug("%s" % r.text)
            logging.debug("-------------")

            tmpfile = 'tmpads.txt'
            with open(tmpfile, 'wb') as tmp_csv_file:
                tmp_csv_file.write(r.text)
                tmp_csv_file.close()

            with open(tmpfile, 'rb') as tmp_csv_file:
                #read the line, split on first comment and keep what is to the left (if any found)
                line_reader = csv.reader(tmp_csv_file, delimiter='#', quotechar='|')
                comment = ''

                for line in line_reader:
                    logging.debug("DATA: %s" % line)

                    try:
                        data_line = line[0]
                    except:
                        data_line = ""

                    #determine delimiter, conservative = do it per row
                    if data_line.find(",") != -1:
                        data_delimiter = ','
                    elif data_line.find("\t") != -1:
                        data_delimiter = '\t'
                    else:
                        data_delimiter = ' '

                    data_reader = csv.reader([data_line], delimiter=',', quotechar='|')
                    for row in data_reader:
                        if len(row) > 0 and row[0].startswith( '#' ):
                            continue

                        if (len(line) > 1) and (len(line[1]) > 0):
                            comment = line[1]

                        rowcnt = rowcnt + process_row_to_db(conn, row, comment, ahost)

    return rowcnt

# end crawl_to_db #####
#################################################################
# FUNCTION load_url_queue
# Load the target set of URLs and reduce to an ads.txt domains queue
#
#################################################################
def load_url_queue(csvfilename, url_queue):
    cnt = 0

    with open(csvfilename, 'rb') as csvfile:
        targets_reader = csv.reader(csvfile, delimiter=',', quotechar='|')

        for row in targets_reader:
            if len(row) < 1 or row[0].startswith( '#' ):
                continue

            for item in row:
                host = "localhost"

                if "http:" in item or "https:" in item :
                    logging.info( "URL: %s" % item)
                    parsed_uri = urlparse(row[0])
                    host = parsed_uri.netloc
                else:
                    host = item
                    logging.info( "HOST: %s" % item)

                skip = 0

                try:
                    #print "Checking DNS: %s" % host
                    ip = socket.gethostbyname(host)

                    if "127.0.0" in ip:
                        skip = 0 #swap to 1 to skip localhost testing
                    elif "0.0.0.0" in ip:
                        skip = 1
                    else:
                        logging.info(" Validated Host IP: %s" % ip)
                except:
                    skip = 1

                if(skip < 1):
                    ads_txt_url = 'http://{thehost}/ads.txt'.format(thehost=host)
                    logging.info(" pushing %s" % ads_txt_url)
                    url_queue[ads_txt_url] = host
                    cnt = cnt + 1

    return cnt

# end load_url_queue #####
#### MAIN ####

arg_parser = OptionParser()
arg_parser.add_option("-t", "--targets", dest="target_filename",
                      help="list of domains to crawl ads.txt from", metavar="FILE")
arg_parser.add_option("-d", "--database", dest="target_database",
                      help="Database to dump crawled data into", metavar="FILE")
arg_parser.add_option("-v", "--verbose", dest="verbose", action='count',
                      help="Increase verbosity (specify multiple times for more)")

(options, args) = arg_parser.parse_args()

if len(sys.argv)==1:
    arg_parser.print_help()
    exit(1)

log_level = logging.WARNING # default
if options.verbose == 1:
    log_level = logging.INFO
elif options.verbose >= 2:
    log_level = logging.DEBUG

logging.basicConfig(filename='adstxt_crawler.log',level=log_level,format='%(asctime)s %(filename)s:%(lineno)d:%(levelname)s %(message)s')

crawl_url_queue = {}
conn = None
cnt_urls = 0
cnt_records = 0

cnt_urls = load_url_queue(options.target_filename, crawl_url_queue)

if (cnt_urls > 0) and options.target_database and (len(options.target_database) > 1):
    conn = sqlite3.connect(options.target_database)
    with conn:
        cnt_records = crawl_to_db(conn, crawl_url_queue)
        if(cnt_records > 0):
            conn.commit()
    #conn.close()

print "Wrote %d records from %d URLs to %s" % (cnt_records, cnt_urls, options.target_database)
logging.warning("Wrote %d records from %d URLs to %s" % (cnt_records, cnt_urls, options.target_database))
logging.warning("Finished.")
I'm using Python 2.7.9.
I tried to install sqlite with this command:
python -m pip install sqlite
I got back this:
Downloading/unpacking sqlite3
  Could not find any downloads that satisfy the requirement sqlite3
Cleaning up...
No distributions at all found for sqlite3
Storing debug log for failure in ...\pip.log
First step would be this command:
$sqlite3 adstxt.db < adstxt_crawler.sql
I got these:
"'sqlite3' is not recognized as an internal or external command, operable program or batch file."
I know it's very basic, but I haven't found any relevant help. If you could help me, I'd really appreciate it.
Thanks.
Adam
The first error:
'sqlite3' is not recognized as an internal or external command, operable program or batch file.
is because you are trying to run the sqlite3 command-line tool, which is not installed on your system. Python ships with the sqlite3 module, but it does not provide the standalone sqlite3 command.
The second error is a syntax error: in Python 3, print is a built-in function, so it must be called with parentheses:
print('hello world')
You probably tried to run Python 2 code with a Python 3 interpreter.
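If all you need is the adstxt.db schema so the crawler can run, you can create it from Python with the bundled sqlite3 module instead of the missing command-line tool. A minimal sketch, assuming adstxt_crawler.sql from the crawler repository sits in the current directory:

import sqlite3

conn = sqlite3.connect('adstxt.db')           # creates the file if it doesn't exist
with open('adstxt_crawler.sql', 'r') as f:
    conn.executescript(f.read())              # run the whole schema script in one go
conn.close()

After that, point the crawler at the database with -d adstxt.db and at your target list with -t.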
If I run "python /home/pi/temp/getTemp.py" from the terminal command line I get
"Error, serial port '' does not exist!" If I cd to the temp directory and run "python getTemp.py" it runs fine. Can anyone tell me why?
#!/usr/bin/env python
import os
import sys
import socket
import datetime
import subprocess
import signal
port = "/dev/ttyUSB0"
tlog = '-o%R,%.4C'
hlog = '-HID:%R,H:%h'
clog = '-OSensor %s C: %.2C'
def logStuff(data):
with open("/home/pi/temp/templog.txt", "a") as log_file:
log_file.write(data + '\n')
def main():
try:
output = subprocess.check_output(['/usr/bin/digitemp_DS9097U', '-q', '-a'])
for line in output.split('\n'):
if len(line) == 0:
logStuff("len line is 0")
continue
if 'Error' in line:
logStuff("error in output")
sys.exit()
line = line.replace('"','')
if line.count(',') == 1:
(romid, temp) = line.split(',')
poll = datetime.datetime.now().strftime("%I:%M:%S %p on %d-%B-%y")
content =(romid + "," + poll + "," + temp)
print content
return content
except subprocess.CalledProcessError, e:
print "digitemp error:\n", e.output
except Exception as e:
logStuff('main() error: %s' %e)
os.kill(os.getpid(), signal.SIGKILL)
if __name__ == "__main__":
main()
It probably cannot find the configuration file, .digitemprc, which is created in the directory where you ran digitemp with -i to initialize the network; that would explain why it works when you cd to /home/pi/temp first. If the config was created in a different directory, you need to always tell digitemp where to find it by passing -c <path to config>.
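For example (a sketch; the config path below is an assumption, substitute wherever your .digitemprc was actually written), the script can be made independent of the working directory by passing -c with an absolute path:

DIGITEMP_CONF = "/home/pi/temp/.digitemprc"   # assumed location of the config created by -i

output = subprocess.check_output(
    ['/usr/bin/digitemp_DS9097U', '-c', DIGITEMP_CONF, '-q', '-a'])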
I have code that needs to pass the latency, upspeed and dlspeed values to another web site for display. Right now the code is as below:
import datetime
import os
import sys
import shutil
import webbrowser
import tempfile
import subprocess
import json
import urllib.request
import statistics
import pymysql
import pymysql.cursors
IPERF3_WIN_PATH = "data/iperf3.exe"
HTML_TEMPLATE_PATH = "data/template.html"
IPERF3_HOST = "127.0.0.1"
RESULT_UPLOAD_URL = "UPLOAD URL"
RESULT_VIEW_URL = "VIEW URL"
def resource_path(relative_path):
    """ Get absolute path to resource, works for dev and for PyInstaller
    This is to get a path which will work with pyinstaller
    """
    try:
        # PyInstaller creates a temp folder and stores path in
        # _MEIPASS
        base_path = sys._MEIPASS
    except Exception:
        base_path = os.path.abspath(".")

    return os.path.join(base_path, relative_path)


def ping(ip, tries):
    """ Ping "ip" using the windows ping command
    Return the average ping as an int
    """
    res = 0
    try:
        output = subprocess.check_output(
            ["ping", "-n", str(tries), ip]).decode("utf-8")
        res = int(output.split(" = ")[-1].split("ms")[0])
    except subprocess.CalledProcessError:
        input("Press Enter to Continue...")
        sys.exit("Error while trying to ping the server, exiting")
    else:
        return res


def copyIperf3Exec():
    """ On OSX:
    Copy the iperf3 binary to a tmp file,
    make it executable and return its path
    This is to avoid many bundle related problems
    On Windows, just return the package path """
    return resource_path(IPERF3_WIN_PATH)
def get_iperf3_download():
    """ Return the output of the iperf3 cli as a python dict """
    ipf3_tmp = copyIperf3Exec()
    try:
        output = subprocess.check_output([ipf3_tmp,
                                          "-c", IPERF3_HOST,
                                          "-J",
                                          "-P", "16",
                                          "-w", "710000",
                                          "-R"])
        res_string = output.decode("utf-8")
    except subprocess.CalledProcessError:
        input("Press Enter to Continue...")
        sys.exit("Problem while doing the test, please try again later")
    else:
        return json.loads(res_string)


def get_iperf3_upload():
    """ Return the output of the iperf3 cli as a python dict """
    ipf3_tmp = copyIperf3Exec()
    try:
        output = subprocess.check_output([ipf3_tmp,
                                          "-c", IPERF3_HOST,
                                          "-J",
                                          "-P", "10",
                                          "-w", "710000"])
        res_string = output.decode("utf-8")
    except subprocess.CalledProcessError:
        input("Press Enter to Continue...")
        sys.exit("Error while doing the upload test, please try again later")
    else:
        return json.loads(res_string)
def get_userinfos():
    """ Get the 3 informations to be presented to the user
    ( ip, upload speed, download speed )
    Return a Dictionary
    """
    show_start_msg(0)  # 0% Progress bar
    avg_latency = ping(IPERF3_HOST, 5)

    u_json = get_iperf3_upload()
    show_start_msg(1)  # 40%

    d_json = get_iperf3_download()
    show_start_msg(2)  # 80%

    ip = getip_apify()

    u_bits_per_second = u_json['end']['sum_received']['bits_per_second']
    d_bits_per_second = d_json['end']['sum_received']['bits_per_second']
    u_testtime = u_json['end']['sum_received']['seconds']
    d_testtime = d_json['end']['sum_received']['seconds']
    u_testdate = u_json["start"]["timestamp"]["timesecs"]
    d_testdate = d_json["start"]["timestamp"]["timesecs"]

    res = {
        'ip': ip,
        'latency': avg_latency,
        'upspeed': u_bits_per_second,
        'dlspeed': d_bits_per_second,
        'upspeedtime': u_testtime,
        'dlspeedtime': d_testtime,
        'upspeeddate': u_testdate,
        'dlspeeddate': d_testdate
    }

    return res
def sendToDB(infos):
    # Connect to the database
    connection = pymysql.connect(host='127.0.0.1',
                                 user='testclient',
                                 password='password',
                                 db='speed',
                                 charset='utf8mb4',
                                 cursorclass=pymysql.cursors.DictCursor)

    try:
        with connection.cursor() as cursor:
            # Create a new record
            def stp_date(stp):
                return datetime.datetime.fromtimestamp(stp).strftime(
                    '%Y-%m-%d %H:%M:%S')

            sql = ("INSERT INTO `speedlog`"
                   "(`externalIP`, `uploadspeed`, `uploadspeedtime`,"
                   "`uploadspeeddate`, `downloadspeed`, `downloadspeedtime`,"
                   "`downloadspeeddate`, `latency`)"
                   "VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")

            cursor.execute(sql,
                           (infos["ip"],
                            str(int(infos["upspeed"])),
                            str("{0:.2f}".format(infos["upspeedtime"])),
                            stp_date(infos["upspeeddate"]),
                            str(int(infos["dlspeed"])),
                            str("{0:.2f}".format(infos["dlspeedtime"])),
                            stp_date(infos["dlspeeddate"]),
                            str(int(infos["latency"]))))

        # connection is not autocommit by
        # default. So you must commit to save
        # your changes.
        connection.commit()
    finally:
        connection.close()

    return
def getip_apify():
    res = urllib.request.urlopen("http://api.ipify.org")
    raw_ip = res.read()
    return raw_ip.decode('utf-8')


def prepare_template(templatePath, infos):
    """ Load an html located at templatePath and replace the necessary text
    with the associated values from the iPerf3 infos
    Return a string
    """
    f_template = open(templatePath)
    s_template = f_template.read()
    f_template.close()

    mod_template = s_template.replace("avglatency", str(int(infos['latency'])))
    mod_template = mod_template.replace(
        "upspeed", str("{0:.3f}".format(infos['upspeed']/(1000*1000*1000))))
    mod_template = mod_template.replace(
        "dlspeed", str("{0:.3f}".format(infos['dlspeed']/(1000*1000*1000))))

    return mod_template


def str_to_tempHtml(str):
    """ Write "str" in an .html temporary file
    And return his path
    """
    data = bytes(str, "utf-8")

    tmp = tempfile.NamedTemporaryFile(suffix=".html", delete=False)
    tmp.write(data)
    tmp.flush()

    return tmp.name
def show_start_msg(progress):
    if sys.platform.startswith('darwin'):
        unused = os.system('clear')
    elif sys.platform.startswith('win32'):
        unused = os.system('cls')

    print("="*70)
    print("Speed Testing for 10G Network \n")
    print("Powered by iPerf3")
    print("="*70)

    if progress == -1:
        input("Press Enter to Continue...\n")
        return
    else:
        print("Press Enter to Continue...\n")

    print("Testing in progress")
    if progress == 0:
        print("[" + " "*68 + "]" + " 0%")
    elif progress == 1:
        print("[" + "#" * 27 + " " * 41 + "]" + " 40%")
    elif progress == 2:
        print("[" + "#" * 54 + " " * 14 + "]" + " 80%")
    elif progress == 3:
        print("[" + "#"*68 + "]" + " 100%")
        print("Completed")
if __name__ == '__main__':
    show_start_msg(-1)
    infos = get_userinfos()
    sendToDB(infos)
    show_start_msg(3)  # 100% Complete

    data = { "key":"Jasdkjfhsda349*lio34sdfFdslaPisdf",
             "download":"2048000",
             "upload":"2048000",
             "latency":"10"}

    req = urllib.request.Request(RESULT_UPLOAD_URL, json.dumps(data).encode(
        'ascii'))
    req.add_header('Content-Type', 'application/json')
    resp = urllib.request.urlopen(req).read().decode('ascii')
    resp = resp.replace('\'', '"')

    webbrowser.open(RESULT_VIEW_URL.format(json.loads(resp)['test_id']))

    input("Press Enter to Continue...")
My latency, upspeed and dlspeed variables are stored in infos and later sent over to the DB for recording via sendToDB(infos).
The next part is to also pass these variables to another web service via REST. In data, the first attribute "key" is the REST key for authentication, followed by the other values such as latency, download speed and upload speed. However, you can see that all three of these are hard-coded values rather than the values derived from the test (latency, upspeed and dlspeed).
How can I modify the code to get these attributes instead of the hardcoded ones?
You have a method that returns this dictionary...
res = {
'ip': ip,
'latency': avg_latency,
'upspeed': u_bits_per_second,
'dlspeed': d_bits_per_second,
'upspeedtime': u_testtime,
'dlspeedtime': d_testtime,
'upspeeddate': u_testdate,
'dlspeeddate': d_testdate
}
And it is called infos, so use it:
data = { "key":"xxxxxxxx",
"download":infos['dlspeed']
"upload":infos['upspeed'],
"latency":infos['latency']}
I would like to read Windows' event log. I am not sure if it's the best way, but I would like to use the pywin32 -> win32evtlog module to do so. First and foremost, is it possible to read logs from Windows 7 using this library, and if so, how do I read events associated with application runs (running an .exe must leave a trace in the event log in Windows, I guess)?
I have managed to find a small example on the net, but it's not enough for me, and unfortunately the documentation isn't well written.
import win32evtlog
hand = win32evtlog.OpenEventLog(None,"Microsoft-Windows-TaskScheduler/Operational")
print win32evtlog.GetNumberOfEventLogRecords(hand)
You can find plenty of demos related to the Win32 API in your C:\PythonXX\Lib\site-packages\win32\Demos folder. In this folder you'll find a script named eventLogDemo.py, which shows how to use the win32evtlog module. Just start it with eventLogDemo.py -v and you will get prints from your Windows event log with log type Application.
In case you can't find this script:
import win32evtlog
import win32api
import win32con
import win32security # To translate NT Sids to account names.
import win32evtlogutil


def ReadLog(computer, logType="Application", dumpEachRecord = 0):
    # read the entire log back.
    h=win32evtlog.OpenEventLog(computer, logType)
    numRecords = win32evtlog.GetNumberOfEventLogRecords(h)
    # print "There are %d records" % numRecords

    num=0
    while 1:
        objects = win32evtlog.ReadEventLog(h, win32evtlog.EVENTLOG_BACKWARDS_READ|win32evtlog.EVENTLOG_SEQUENTIAL_READ, 0)
        if not objects:
            break
        for object in objects:
            # get it for testing purposes, but dont print it.
            msg = win32evtlogutil.SafeFormatMessage(object, logType)
            if object.Sid is not None:
                try:
                    domain, user, typ = win32security.LookupAccountSid(computer, object.Sid)
                    sidDesc = "%s/%s" % (domain, user)
                except win32security.error:
                    sidDesc = str(object.Sid)
                user_desc = "Event associated with user %s" % (sidDesc,)
            else:
                user_desc = None
            if dumpEachRecord:
                print "Event record from %r generated at %s" % (object.SourceName, object.TimeGenerated.Format())
                if user_desc:
                    print user_desc
                try:
                    print msg
                except UnicodeError:
                    print "(unicode error printing message: repr() follows...)"
                    print repr(msg)

        num = num + len(objects)

    if numRecords == num:
        print "Successfully read all", numRecords, "records"
    else:
        print "Couldn't get all records - reported %d, but found %d" % (numRecords, num)
        print "(Note that some other app may have written records while we were running!)"
    win32evtlog.CloseEventLog(h)


def usage():
    print "Writes an event to the event log."
    print "-w : Dont write any test records."
    print "-r : Dont read the event log"
    print "-c : computerName : Process the log on the specified computer"
    print "-v : Verbose"
    print "-t : LogType - Use the specified log - default = 'Application'"


def test():
    # check if running on Windows NT, if not, display notice and terminate
    if win32api.GetVersion() & 0x80000000:
        print "This sample only runs on NT"
        return

    import sys, getopt
    opts, args = getopt.getopt(sys.argv[1:], "rwh?c:t:v")
    computer = None
    do_read = do_write = 1
    logType = "Application"
    verbose = 0

    if len(args)>0:
        print "Invalid args"
        usage()
        return 1
    for opt, val in opts:
        if opt == '-t':
            logType = val
        if opt == '-c':
            computer = val
        if opt in ['-h', '-?']:
            usage()
            return
        if opt=='-r':
            do_read = 0
        if opt=='-w':
            do_write = 0
        if opt=='-v':
            verbose = verbose + 1

    if do_write:
        ph=win32api.GetCurrentProcess()
        th = win32security.OpenProcessToken(ph,win32con.TOKEN_READ)
        my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0]

        win32evtlogutil.ReportEvent(logType, 2,
                                    strings=["The message text for event 2","Another insert"],
                                    data = "Raw\0Data".encode("ascii"), sid = my_sid)
        win32evtlogutil.ReportEvent(logType, 1, eventType=win32evtlog.EVENTLOG_WARNING_TYPE,
                                    strings=["A warning","An even more dire warning"],
                                    data = "Raw\0Data".encode("ascii"), sid = my_sid)
        win32evtlogutil.ReportEvent(logType, 1, eventType=win32evtlog.EVENTLOG_INFORMATION_TYPE,
                                    strings=["An info","Too much info"],
                                    data = "Raw\0Data".encode("ascii"), sid = my_sid)
        print("Successfully wrote 3 records to the log")

    if do_read:
        ReadLog(computer, logType, verbose > 0)


if __name__=='__main__':
    test()
I hope this script fits your needs
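One caveat for your original snippet: as far as I know, the classic OpenEventLog/ReadEventLog API used above only reads the old-style logs (Application, System, Security and the like), so a channel name such as Microsoft-Windows-TaskScheduler/Operational will not work with it. For events written by applications, reading the Application log and filtering on the event source is usually enough. A minimal sketch ("MyApp" is a placeholder source name):

import win32evtlog
import win32evtlogutil

hand = win32evtlog.OpenEventLog(None, "Application")    # None means the local machine
flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ

events = win32evtlog.ReadEventLog(hand, flags, 0)
while events:
    for ev in events:
        if ev.SourceName == "MyApp":                    # placeholder: filter on the source you care about
            print ev.TimeGenerated.Format(), ev.SourceName, ev.EventID & 0xFFFF
            print win32evtlogutil.SafeFormatMessage(ev, "Application")
    events = win32evtlog.ReadEventLog(hand, flags, 0)

win32evtlog.CloseEventLog(hand)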