Get value from variable instead of command line in argparse - python

I am using a pynetdicom script to fetch data from dcm4chee.
To run the script I need to pass arguments on the command line.
But I already have those values in variables (or in another object) and I want to use them from there. I don't see how to pass those values to the parser, or whether it is possible to skip parsing altogether.
Please help me understand how I can supply the values from variables instead of passing them on the command line.
Script:
#!/usr/bin/python
"""
For help on usage,
python qrscu.py -h
"""
import argparse
from netdicom.applicationentity import AE
from netdicom.SOPclass import *
from dicom.dataset import Dataset, FileDataset
from dicom.UID import ExplicitVRLittleEndian, ImplicitVRLittleEndian, ExplicitVRBigEndian
import netdicom
import tempfile
# parse commandline
parser = argparse.ArgumentParser(description='storage SCU example')
print "parser", parser
parser.add_argument('remotehost')
parser.add_argument('remoteport', type=int)
parser.add_argument('searchstring')
parser.add_argument('-p', help='local server port', type=int, default=9999)
parser.add_argument('-aet', help='calling AE title', default='PYNETDICOM')
parser.add_argument('-aec', help='called AE title', default='REMOTESCU')
parser.add_argument('-implicit', action='store_true', help='negociate implicit transfer syntax only', default=False)
parser.add_argument('-explicit', action='store_true', help='negociate explicit transfer syntax only', default=False)
args = parser.parse_args()
print "args :::: ", type(args), args
if args.implicit:
ts = [ImplicitVRLittleEndian]
elif args.explicit:
ts = [ExplicitVRLittleEndian]
else:
ts = [
ExplicitVRLittleEndian,
ImplicitVRLittleEndian,
ExplicitVRBigEndian
]
# call back
def OnAssociateResponse(association):
print "Association response received"
def OnAssociateRequest(association):
print "Association resquested"
return True
def OnReceiveStore(SOPClass, DS):
print "Received C-STORE", DS.PatientName
try:
# do something with dataset. For instance, store it.
file_meta = Dataset()
file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
file_meta.MediaStorageSOPInstanceUID = "1.2.3" # !! Need valid UID here
file_meta.ImplementationClassUID = "1.2.3.4" # !!! Need valid UIDs here
filename = '%s/%s.dcm' % (tempfile.gettempdir(), DS.SOPInstanceUID)
ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
ds.update(DS)
ds.save_as(filename)
print "File %s written" % filename
except:
pass
# must return appropriate status
return SOPClass.Success
# create application entity with Find and Move SOP classes as SCU and
# Storage SOP class as SCP
MyAE = AE(args.aet, args.p, [PatientRootFindSOPClass,
PatientRootMoveSOPClass,
VerificationSOPClass], [StorageSOPClass], ts)
MyAE.OnAssociateResponse = OnAssociateResponse
MyAE.OnAssociateRequest = OnAssociateRequest
MyAE.OnReceiveStore = OnReceiveStore
MyAE.start()
# remote application entity
RemoteAE = dict(Address=args.remotehost, Port=args.remoteport, AET=args.aec)
# create association with remote AE
print "Request association"
assoc = MyAE.RequestAssociation(RemoteAE)
# perform a DICOM ECHO
print "DICOM Echo ... ",
if assoc:
st = assoc.VerificationSOPClass.SCU(1)
print 'done with status "%s"' % st
print "DICOM FindSCU ... ",
print "\n\n----------------------------------------------------------------------\n\n"
d = Dataset()
d.StudyDate = args.searchstring
d.QueryRetrieveLevel = "STUDY"
d.PatientID = "*"
study = [x[1] for x in assoc.PatientRootFindSOPClass.SCU(d, 1)][:-1]
print 'done with status "%s"' % st
print "\n\n\n Cont...", study
print "\n\n----------------------------------------------------------------------\n\n"
# loop on patients
for pp in study:
print "\n\n----------------------Pateint Detals------------------------------------------------\n\n"
print "%s - %s" % (pp.StudyDate, pp.PatientID)
# find studies
d = Dataset()
d.PatientID = pp.PatientID
d.QueryRetrieveLevel = "STUDY"
d.PatientName = ""
d.StudyInstanceUID = ""
d.StudyDate = ""
d.StudyTime = ""
d.StudyID = ""
d.ModalitiesInStudy = ""
d.StudyDescription = ""
studies = [x[1] for x in assoc.PatientRootFindSOPClass.SCU(d, 1)][:-1]
# loop on studies
for st in studies:
print "\n study :: ", studies
print "\n\n---------------------------Study---------------------------\n\n"
print " %s - %s %s" % (st.StudyDescription, st.StudyDate, st.StudyTime)
d = Dataset()
d.QueryRetrieveLevel = "SERIES"
d.StudyInstanceUID = st.StudyInstanceUID
d.SeriesInstanceUID = ""
d.InstanceNumber = ""
d.Modality = ""
d.SeriesNumber = ""
d.SeriesDescription = ""
d.AccessionNumber = ""
d.SeriesDate = ""
d.SeriesTime = ""
d.SeriesID = ""
d.NumberOfSeriesRelatedInstances = ""
series = [x[1] for x in assoc.PatientRootFindSOPClass.SCU(d, 1)][:-1]
# print series uid and number of instances
if series:
for se in series:
print "\n\n---------------------------Series---------------------------\n\n"
print "\n\n\n series", se
print " %15s - %10s - %35s - %5s" % (se.SeriesNumber, se.Modality, se.SeriesDescription, se.NumberOfSeriesRelatedInstances)
print "Release association"
assoc.Release(0)
# done
MyAE.Quit()
else:
print "Failed to create Association."
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

Put everything inside functions and call them wherever you like:
#!/usr/bin/python
"""
For help on usage,
python qrscu.py -h
"""
import os
import argparse
from netdicom.applicationentity import AE
from netdicom.SOPclass import *
from dicom.dataset import Dataset, FileDataset
from dicom.UID import ExplicitVRLittleEndian, ImplicitVRLittleEndian, ExplicitVRBigEndian
import netdicom
import tempfile
# call back
def OnAssociateResponse(association):
print "Association response received"
def OnAssociateRequest(association):
print "Association resquested"
return True
def OnReceiveStore(SOPClass, DS):
print "Received C-STORE", DS.PatientName
try:
# do something with dataset. For instance, store it.
file_meta = Dataset()
file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
file_meta.MediaStorageSOPInstanceUID = "1.2.3" # !! Need valid UID here
file_meta.ImplementationClassUID = "1.2.3.4" # !!! Need valid UIDs here
        filename = os.path.join(tempfile.gettempdir(), DS.SOPInstanceUID + '.dcm')
ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
ds.update(DS)
ds.save_as(filename)
print "File %s written" % filename
except Exception as e:
print "Some exception occured", e
# must return appropriate status
return SOPClass.Success
def print_data(remotehost, remoteport, searchstring, local_port=9999,
calling_title='PYNETDICOM', called_title='REMOTESCU',
ts=(ExplicitVRLittleEndian, ImplicitVRLittleEndian, ExplicitVRBigEndian)):
# create application entity with Find and Move SOP classes as SCU and
# Storage SOP class as SCP
MyAE = AE(calling_title, local_port, [PatientRootFindSOPClass,
PatientRootMoveSOPClass,
VerificationSOPClass], [StorageSOPClass], ts)
MyAE.OnAssociateResponse = OnAssociateResponse
MyAE.OnAssociateRequest = OnAssociateRequest
MyAE.OnReceiveStore = OnReceiveStore
MyAE.start()
# remote application entity
RemoteAE = dict(Address=remotehost, Port=remoteport, AET=called_title)
# create association with remote AE
print "Request association"
assoc = MyAE.RequestAssociation(RemoteAE)
# perform a DICOM ECHO
print "DICOM Echo ... ",
if not assoc:
print "Failed to create Association."
return
st = assoc.VerificationSOPClass.SCU(1)
print 'done with status "%s"' % st
print "DICOM FindSCU ... ",
print "\n\n----------------------------------------------------------------------\n\n"
d = Dataset()
d.StudyDate = searchstring
d.QueryRetrieveLevel = "STUDY"
d.PatientID = "*"
study = [x[1] for x in assoc.PatientRootFindSOPClass.SCU(d, 1)][:-1]
print 'done with status "%s"' % st
print "\n\n\n Cont...", study
print "\n\n----------------------------------------------------------------------\n\n"
# loop on patients
for pp in study:
print "\n\n----------------------Pateint Detals------------------------------------------------\n\n"
print "%s - %s" % (pp.StudyDate, pp.PatientID)
# find studies
d = Dataset()
d.PatientID = pp.PatientID
d.QueryRetrieveLevel = "STUDY"
d.PatientName = ""
d.StudyInstanceUID = ""
d.StudyDate = ""
d.StudyTime = ""
d.StudyID = ""
d.ModalitiesInStudy = ""
d.StudyDescription = ""
studies = [x[1] for x in assoc.PatientRootFindSOPClass.SCU(d, 1)][:-1]
# loop on studies
for st in studies:
print "\n study :: ", studies
print "\n\n---------------------------Study---------------------------\n\n"
print " %s - %s %s" % (st.StudyDescription, st.StudyDate, st.StudyTime)
d = Dataset()
d.QueryRetrieveLevel = "SERIES"
d.StudyInstanceUID = st.StudyInstanceUID
d.SeriesInstanceUID = ""
d.InstanceNumber = ""
d.Modality = ""
d.SeriesNumber = ""
d.SeriesDescription = ""
d.AccessionNumber = ""
d.SeriesDate = ""
d.SeriesTime = ""
d.SeriesID = ""
d.NumberOfSeriesRelatedInstances = ""
series = [x[1] for x in assoc.PatientRootFindSOPClass.SCU(d, 1)][:-1]
# print series uid and number of instances
for se in series:
print "\n\n---------------------------Series---------------------------\n\n"
print "\n\n\n series", se
print " %15s - %10s - %35s - %5s" % (se.SeriesNumber, se.Modality, se.SeriesDescription, se.NumberOfSeriesRelatedInstances)
print "Release association"
assoc.Release(0)
# done
MyAE.Quit()
def parse_commandline():
# parse commandline
parser = argparse.ArgumentParser(description='storage SCU example')
print "parser", parser
parser.add_argument('remotehost')
parser.add_argument('remoteport', type=int)
parser.add_argument('searchstring')
parser.add_argument('-p', help='local server port', type=int, default=9999)
parser.add_argument('-aet', help='calling AE title', default='PYNETDICOM')
parser.add_argument('-aec', help='called AE title', default='REMOTESCU')
parser.add_argument('-implicit', action='store_true', help='negociate implicit transfer syntax only', default=False)
parser.add_argument('-explicit', action='store_true', help='negociate explicit transfer syntax only', default=False)
args = parser.parse_args()
print "args :::: ", type(args), args
if args.implicit:
ts = [ImplicitVRLittleEndian]
elif args.explicit:
ts = [ExplicitVRLittleEndian]
else:
ts = [
ExplicitVRLittleEndian,
ImplicitVRLittleEndian,
ExplicitVRBigEndian
]
return args, ts
if __name__ == '__main__':
args, ts = parse_commandline()
print_data(args.remotehost, args.remoteport, args.searchstring, args.p, args.aet, args.aec, ts)
and use it like:
import your_module
your_module.print_data(remotehost, remoteport, searchstring)

You can incorporate the script above into your own code by using the Python module subprocess. It lets you run the script with arguments that you build from your variables or objects.
Example: say you have some variables your_arg_1...your_arg_n that can be consumed by qrscu.py. You can then pass these variables to the script (all arguments must be strings) with
import subprocess
r = subprocess.check_call(['qrscu.py', your_arg_1, your_arg_2, ..., your_arg_n])
The "args = parser.parse_args()" line in the script will pick up those values and pass them on to the MyAE object. For more information about argparse, see link.

Related

Table values not getting inserted into mysql database until cntrl+c is entered to close python file in linux

Edited
Q) Can someone help me get the values inserted into the MySQL database? I'm just confused about where to place the mydb code.
Reason: the values only get inserted into the MySQL database once I manually press Ctrl+C to stop the .py file on Linux.
Here is the complete code used in the .py file; where should I place the mydb section?
import os
import re
from builtins import len, Exception
import slack
import logging
from subprocess import check_output
import datetime
import mysql.connector
import time
import json
import requests
#user_threads_info = {}
#thread_ts = ""
#slack.RTMClient.run_on(event='message')
def say_hello(**payload):
try:
##0 get clients and payload
logging.info('msg received')
data = payload['data']
web_client = payload['web_client']
rtm_client = payload['rtm_client']
##0 - 1 Check if it is the first msg, not replied msg by me
# print(data)
if data.get('text') == None:
logging.info('This msg is my replied msg.')
return False
##0-2 Get channel info
channel_id = data['channel']
thread_ts = data['ts']
global user
user = data['user']
#user_info = get_userinfo(user)
#print(user_info)
msg = data['text']
##1 get scenario submsg
retVal = analysis_msg(msg)
# print(retVal)
response = web_client.users_list()
assert(response['ok'])
user_map = {x['id']: x['name'] for x in response['members']}
global user_name
user_name = user_map[user] if user in user_map else None
print(user_name)
if retVal[0] == False:
retMsg = retVal[1] + "\nI can create the following orders. \n" \
"a) spu - store pickup \n" \
"b) sth - ship to home \n" \
"c) del - delivery \n" \
"d) digitalAsGuest - Digital item \n" \
" \n" \
"Please provide information as mentioned in below example.\n" \
" \n" \
"Example: spu:3646989:sftqa3:AMEX\n" \
"\n" \
"Sample SKUS:\n" \
"spu - [3646989,8862011]\n" \
"sth - [2592015,6140094]\n" \
"del - [5592005,8862011]\n" \
"digitalAsGuest - [2810037,5057400]"
send_msg(web_client, channel_id, thread_ts, user, retMsg)
return False
##2 form cmd
retVal = form_cmd(retVal[1])
print(retVal)
if retVal == False:
return False
##3 execute cmd
# inform the start of test
retMsg = "Creating an order,Please wait for the result."
send_msg(web_client, channel_id, thread_ts, user, retMsg)
global res
try:
res1 = os.popen(retVal).read()
print("Printing result...")
print(res1)
print("end of print")
res = reg_result_new(res1)
if res == False:
print("reg_function failure")
retMsg = "The test order placement failed."
else:
retMsg = "Order Id - " + res['id'] + "\nFirst Name - " + res['firstName'] + "\nLast Name - " + res['lastName'] + "\n PhoneNumber - " + res['dayPhoneNumber'] + "\n Email - " + res['email'] + "\n"
except Exception as ee:
retMsg = "The test scenario has a failure. Please Check the feature file."
## 4 send result to slack
# retMsg = "Order Id - " + res['id'] + "\nFirst Name - " + res['firstName'] + "\nLast Name - " + res['lastName'] + "\n PhoneNumber - " + res['day PhoneNumber'] + "\n Email - " + res['email'] + "\n"
create_result_file(user, res)
send_msg(web_client, channel_id, thread_ts, user, retMsg)
print(retVal)
except Exception as e:
print("error")
logging.critical(str(e))
############################ My handlers ##############################
def create_result_file(user, res):
try:
cur_time = datetime.datetime.now()
file_name = user + str(cur_time.year) + str(cur_time.month) + str(cur_time.day) + str(cur_time.hour) + str(
cur_time.minute) + str(cur_time.second) + '.txt'
file = open(file_name, 'w')
file.write(res)
file.close()
except Exception as e:
print(str(e))
def send_msg(web_client, channel_id, thread_ts,user,mgs):
print("thread_ts value is:"+thread_ts)
web_client.chat_postMessage(
channel=channel_id,
text=f"```Hi <#{user}>! \n " + mgs + "```",
thread_ts=thread_ts
)
#def get_userinfo(user):
# payload = {'token': slack_token, 'user': user}
# r = requests.get('https://slack.com/api/users.info', params=payload)
# print(r.text)
# return json.loads(r.text)["user"]
# error code mgmt.
def error_code(code):
# reserved
print(code)
return [False, code]
# break down msg to the test scenario submsgs
def analysis_msg(msg):
global submsg
submsg = msg.split(":")
for value in submsg:
print(value)
if len(submsg) != 4:
logging.warning("This msg not test scenario")
return error_code("Please check the format")
res = {}
res["feature"] = submsg[0]
res["sku"] = submsg[1]
res["env"] = submsg[2]
res["payment"] = submsg[3]
###check
if validate_sku(res["sku"]) == False:
return error_code("INVALID_SKU \n")
if validate_env(res["env"]) == False:
return error_code("INVALID_ENV \n")
if validate_payment(res["payment"]) == False:
return error_code("INVALID_payment \n")
if check_specialCharacter(res["feature"]) == False:
return error_code("INVALID_PROFILE_WITH_SPECIAL_CHARACTER")
return [True, res]
# form cmd for test bat files ! reserved
def form_cmd(submsg):
cmd = 'sh /home/iptbot/iptautobot/test.sh ' + submsg['env'] + ' ' + submsg['feature'] + ' ' + submsg["sku"] + ' ' + submsg["payment"]
return cmd
#code to print user details
#code to print user details
def reg_result_new(res):
start = 'COP Order Response :'
end = 'isGuestMode'
start_index = res.find(start) + len(start)
res = res[start_index:]
end_index = res.find(end) + 22
global data
data = res[:end_index]
try:
print('Data -> ' + str(data))
data = json.loads(data.strip())
new_data = {}
new_data['id'] = data['id']
new_data['firstName'] = data['lineItems'][0]['fulfillmentInfo']['storeInfo']['agentInfo']['firstName']
new_data['lastName'] = data['lineItems'][0]['fulfillmentInfo']['storeInfo']['agentInfo']['lastName']
new_data['dayPhoneNumber'] = data['lineItems'][0]['fulfillmentInfo']['storeInfo']['agentInfo']['dayPhoneNumber']
new_data['email'] = data['lineItems'][0]['fulfillmentInfo']['storeInfo']['agentInfo']['email']
#new_data['firstName'] = data['paymentInfo']['billingAddressInfo']['firstName']
return new_data
except Exception as e:
print('Here error -> '+str(e))
return False
#def reg_result(res):
# "COP Order Response"
# lines = res.split('\n')
# for line in lines:
# pattern = "COP Order Response*"
# prog = re.compile(pattern)
# result = prog.search(line)
# if result == None:
# continue
# res1 = result.string.split('{')
# if len(res1) < 2:
# continue
# res2 = res1[1].split(',')
# if len(res2) < 2:
# continue
# res3 = res2[0].split(':')
# if len(res3) < 2:
# continue
# return res3[1]
# COP Order Response : {"id":"BBY01-200001878853"
# return False
# return val is Boolean
# True/False
# Input type: String
# for positive integer only
# alternative way: Handle exception for int(d)
def validate_sku(sku_val):
return sku_val.isnumeric()
# input val : string
# return val: Boolean
def validate_env(env_val):
env_list = [
"sftqa1" , "sftqa2" , "sftqa3" , "sftqa4"
]
if env_val in env_list:
return True
else:
return False
def validate_payment(payment_val):
env_payment = [
"AMEX","VISA"
]
if payment_val in env_payment:
return True
else:
return False
# input val : string
# return val: Boolean
def check_specialCharacter(s):
if s == "":
return False
if s.isspace():
return False
return s.isalnum()
slack_token = os.environ["SLACK_API_TOKEN"]
rtm_client = slack.RTMClient(token=slack_token)
rtm_client.start()
#database connction
mydb = mysql.connector.connect(
host="host",
user="user",
passwd="pass",
database="db"
)
mycursor = mydb.cursor()
for value in submsg:
print(value)
fulfilment=submsg[0]
sku=submsg[1]
environment=submsg[2]
payment=submsg[3]
ts = time.time()
date = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
orderNumber=data['id']
username=user_name
print(fulfilment)
print(sku)
print(environment)
print(payment)
print(username)
print(orderNumber)
sqlformula = "INSERT INTO orderDetails (fulfilment,sku,environment,payment,orderNumber,date,user) VALUES (%s,%s,%s,%s,%s,%s,%s)"
#order=("sth",3643387,"sftqa2","AMEX")
#mycursor.execute(sqlformula,order)
mycursor.execute(sqlformula,(fulfilment,sku,environment,payment,orderNumber,date,username))
mydb.commit()
mydb.close()
Output
sh /home/iptbot/iptautobot/test.sh sftqa3 spu 3646989 AMEX
error
CRITICAL:root:'user'
error
CRITICAL:root:'user' // clicking Control+C values get inserted
^CWARNING:slack.rtm.client:Websocket was closed.
3646989
sftqa3
AMEX
spu
3646989
sftqa3
AMEX
a6002043
BBY01-200002091354
You are stuck at this point because rtm_client.start() is a synchronous call.
If you want it to be asynchronous (non-blocking) then you should run:
rtm_client.start(run_async=True)
Here is a good walk-through on how to set up async usage of the library. Also have a look at the method signature for RTMClient to get an idea of how it works.
Here's a good example detailing a lot of what you would need in your case.
Then you will hit your DB execution code, where you will need a loop that works through the data you want to add to the DB.
I would recommend that you use a Queue for this, as it is synchronised and will be easier to manage than a global list that is overwritten on every order. Preferably you could use asyncio.Queue; an example implementation is linked here.
When an order has passed the validation steps, add it to the queue. Here is some pseudo code describing the flow with a basic (not asyncio) Queue:
import queue
q = queue.Queue()
def validate_order(order):
valid_order_data = ......
q.put(valid_order_data)
while True:
valid_order = q.get() # Will wait until there is a value on the queue
mycursor.execute(sqlformula, (valid_order))
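For completeness, here is a minimal sketch of the same producer/consumer flow with asyncio.Queue; the order tuple and the handle_order/db_writer names are illustrative placeholders, not part of the original code:
import asyncio

async def handle_order(q, order):
    # validation of the order would happen here; then hand it to the DB writer
    await q.put(order)

async def db_writer(q):
    while True:
        valid_order = await q.get()   # waits until an order is available
        # replace this print with the real insert, e.g. mycursor.execute(sqlformula, valid_order)
        print("writing", valid_order)
        q.task_done()

async def main():
    q = asyncio.Queue()
    writer = asyncio.create_task(db_writer(q))
    await handle_order(q, ("spu", "3646989", "sftqa3", "AMEX"))
    await q.join()    # wait until everything queued has been written
    writer.cancel()

asyncio.run(main())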

Passing variables in python to another web platform

I have code which needs to pass the latency, upspeed and dlspeed values to another web site for display. Right now the code is as below:
import datetime
import os
import sys
import shutil
import webbrowser
import tempfile
import subprocess
import json
import urllib.request
import statistics
import pymysql
import pymysql.cursors
IPERF3_WIN_PATH = "data/iperf3.exe"
HTML_TEMPLATE_PATH = "data/template.html"
IPERF3_HOST = "127.0.0.1"
RESULT_UPLOAD_URL = "UPLOAD URL"
RESULT_VIEW_URL = "VIEW URL"
def resource_path(relative_path):
""" Get absolute path to resource, works for dev and for PyInstaller
This is to get a path which will work with pyinstaller
"""
try:
# PyInstaller creates a temp folder and stores path in
# _MEIPASS
base_path = sys._MEIPASS
except Exception:
base_path = os.path.abspath(".")
return os.path.join(base_path, relative_path)
def ping(ip, tries):
""" Ping "ip" using the windows ping commmand
Return the average ping as a int
"""
res = 0
try:
output = subprocess.check_output(
["ping", "-n", str(tries), ip]).decode("utf-8")
res = int(output.split(" = ")[-1].split("ms")[0])
except subprocess.CalledProcessError:
input("Press Enter to Continue...")
sys.exit("Error while trying to ping the server, exiting")
else:
return res
def copyIperf3Exec():
""" On OSX :
Copy the iperf3 binary to a tmp file,
make it executable and return his path
This is to avoid many bundle related problems
On Windows, just return the package path """
return resource_path(IPERF3_WIN_PATH)
def get_iperf3_download():
""" Return the output of the iperf3 cli as a python dict """
ipf3_tmp = copyIperf3Exec()
try:
output = subprocess.check_output([ipf3_tmp,
"-c", IPERF3_HOST,
"-J",
"-P", "16",
"-w", "710000",
"-R"])
res_string = output.decode("utf-8")
except subprocess.CalledProcessError:
input("Press Enter to Continue...")
sys.exit("Problem while doing the test, please try again later")
else:
return json.loads(res_string)
def get_iperf3_upload():
""" Return the output of the iperf3 cli as a python dict """
ipf3_tmp = copyIperf3Exec()
try:
output = subprocess.check_output([ipf3_tmp,
"-c", IPERF3_HOST,
"-J",
"-P", "10",
"-w", "710000"])
res_string = output.decode("utf-8")
except subprocess.CalledProcessError:
input("Press Enter to Continue...")
sys.exit("Error while doing the upload test, please try again later")
else:
return json.loads(res_string)
def get_userinfos():
""" Get the 3 informations to be presented to the user
( ip, upload speed, download speed )
Return a Dictionary
"""
show_start_msg(0) # 0% Progress bar
avg_latency = ping(IPERF3_HOST, 5)
u_json = get_iperf3_upload()
show_start_msg(1) # 40%
d_json = get_iperf3_download()
show_start_msg(2) # 80%
ip = getip_apify()
u_bits_per_second = u_json['end']['sum_received']['bits_per_second']
d_bits_per_second = d_json['end']['sum_received']['bits_per_second']
u_testtime = u_json['end']['sum_received']['seconds']
d_testtime = d_json['end']['sum_received']['seconds']
u_testdate = u_json["start"]["timestamp"]["timesecs"]
d_testdate = d_json["start"]["timestamp"]["timesecs"]
res = {
'ip': ip,
'latency': avg_latency,
'upspeed': u_bits_per_second,
'dlspeed': d_bits_per_second,
'upspeedtime': u_testtime,
'dlspeedtime': d_testtime,
'upspeeddate': u_testdate,
'dlspeeddate': d_testdate
}
return res
def sendToDB(infos):
# Connect to the database
connection = pymysql.connect(host='127.0.0.1',
user='testclient',
password='password',
db='speed',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
def stp_date(stp):
return datetime.datetime.fromtimestamp(stp).strftime(
'%Y-%m-%d %H:%M:%S')
sql = ("INSERT INTO `speedlog`"
"(`externalIP`, `uploadspeed`, `uploadspeedtime`,"
"`uploadspeeddate`, `downloadspeed`, `downloadspeedtime`,"
"`downloadspeeddate`, `latency`)"
"VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
cursor.execute(sql,
(infos["ip"],
str(int(infos["upspeed"])),
str("{0:.2f}".format(infos["upspeedtime"])),
stp_date(infos["upspeeddate"]),
str(int(infos["dlspeed"])),
str("{0:.2f}".format(infos["dlspeedtime"])),
stp_date(infos["dlspeeddate"]),
str(int(infos["latency"]))))
# connection is not autocommit by
# default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
return
def getip_apify():
res = urllib.request.urlopen("http://api.ipify.org")
raw_ip = res.read()
return raw_ip.decode('utf-8')
def prepare_template(templatePath, infos):
""" Load an html located at templatePath and replace the necessary text
with the associated values from the iPerf3 infos
Return a string
"""
f_template = open(templatePath)
s_template = f_template.read()
f_template.close()
mod_template = s_template.replace("avglatency", str(int(infos['latency'])))
mod_template = mod_template.replace(
"upspeed", str("{0:.3f}".format(infos['upspeed']/(1000*1000*1000))))
mod_template = mod_template.replace(
"dlspeed", str("{0:.3f}".format(infos['dlspeed']/(1000*1000*1000))))
return mod_template
def str_to_tempHtml(str):
""" Write "str" in an .html temporary file
And return his path
"""
data = bytes(str, "utf-8")
tmp = tempfile.NamedTemporaryFile(suffix=".html", delete=False)
tmp.write(data)
tmp.flush()
return tmp.name
def show_start_msg(progress):
if sys.platform.startswith('darwin'):
unused = os.system('clear')
elif sys.platform.startswith('win32'):
unused = os.system('cls')
print("="*70)
print("Speed Testing for 10G Network \n")
print("Powered by iPerf3")
print("="*70)
if progress == -1:
input("Press Enter to Continue...\n")
return
else:
print("Press Enter to Continue...\n")
print("Testing in progress")
if progress == 0:
print("[" + " "*68 + "]" + " 0%")
elif progress == 1:
print("[" + "#" * 27 + " " * 41 + "]" + " 40%")
elif progress == 2:
print("[" + "#" * 54 + " " * 14 + "]" + " 80%")
elif progress == 3:
print("[" + "#"*68 + "]" + " 100%")
print("Completed")
if __name__ == '__main__':
show_start_msg(-1)
infos = get_userinfos()
sendToDB(infos)
show_start_msg(3) # 100% Complete
data = { "key":"Jasdkjfhsda349*lio34sdfFdslaPisdf",
"download":"2048000",
"upload":"2048000",
"latency":"10"}
req = urllib.request.Request(RESULT_UPLOAD_URL, json.dumps(data).encode(
'ascii'))
req.add_header('Content-Type', 'application/json')
resp = urllib.request.urlopen(req).read().decode('ascii')
resp = resp.replace('\'', '"')
webbrowser.open(RESULT_VIEW_URL.format(json.loads(resp)['test_id']))
input("Press Enter to Continue...")
My latency, upspeed and dlspeed values are stored in infos and later sent to the DB for recording via sendToDB(infos).
The next step is to also pass these values to another site over a RESTful call. In the data dictionary, the first attribute "key" is the REST key used for authentication, followed by the other values such as latency, download speed and upload speed. However, as you can see, all three of those values are currently hard-coded instead of taken from the test results held in latency, upspeed and dlspeed.
How can I modify the code to send those measured values instead of the hard-coded ones?
You have a method that returns this dictionary...
res = {
'ip': ip,
'latency': avg_latency,
'upspeed': u_bits_per_second,
'dlspeed': d_bits_per_second,
'upspeedtime': u_testtime,
'dlspeedtime': d_testtime,
'upspeeddate': u_testdate,
'dlspeeddate': d_testdate
}
And it is called infos, so use it:
data = { "key":"xxxxxxxx",
         "download":infos['dlspeed'],
         "upload":infos['upspeed'],
         "latency":infos['latency']}

get the first 10 google results using googleapi

I need to get the first 10 google results
for example:
... query = urllib.urlencode({'q' : 'example'})
...
... url = 'http://ajax.googleapis.com/ajax/services/search/web?v=1.0&%s' \
... % (query)
... search_results = urllib.urlopen(url)
... json = simplejson.loads(search_results.read())
... results = json['responseData']['results']
This will give me the results of the first page, but I'd like to get more Google results. Does anyone know how to do that?
I've done it in the past; here is a complete example (I'm not a Python guru, but it works):
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, getopt
import urllib
import simplejson
OPTIONS = ("m:", ["min="])
def print_usage():
s = "usage: " + sys.argv[0] + " "
for o in OPTIONS[0]:
if o != ":" : s += "[-" + o + "] "
print(s + "query_string\n")
def search(query, index, offset, min_count, quiet=False, rs=[]):
url = "http://ajax.googleapis.com/ajax/services/search/web?v=1.0&rsz=large&%s&start=%s" % (query, offset)
result = urllib.urlopen(url)
json = simplejson.loads(result.read())
status = json["responseStatus"]
if status == 200:
results = json["responseData"]["results"]
cursor = json["responseData"]["cursor"]
pages = cursor["pages"]
for r in results:
i = results.index(r) + (index -1) * len(results) + 1
u = r["unescapedUrl"]
rs.append(u)
if not quiet:
print("%3d. %s" % (i, u))
next_index = None
next_offset = None
for p in pages:
if p["label"] == index:
i = pages.index(p)
if i < len(pages) - 1:
next_index = pages[i+1]["label"]
next_offset = pages[i+1]["start"]
break
if next_index != None and next_offset != None:
if int(next_offset) < min_count:
search(query, next_index, next_offset, min_count, quiet, rs)
return rs
def main():
min_count = 64
try:
opts, args = getopt.getopt(sys.argv[1:], *OPTIONS)
for opt, arg in opts:
if opt in ("-m", "--min"):
min_count = int(arg)
assert len(args) > 0
except:
print_usage()
sys.exit(1)
qs = " ".join(args)
query = urllib.urlencode({"q" : qs})
search(query, 1, "0", min_count)
if __name__ == "__main__":
main()
Edit: I've fixed the obvious command-line option mishandling; you can call this script as follows:
python gsearch.py --min=5 vanessa mae
The --min switch means "at least 5 results" and is optional; you will get the maximum allowed result count (64) if it is not specified.
Also, error handling is omitted for brevity.
See docs http://code.google.com/apis/websearch/docs/reference.html#_intro_fonje
You’re looking for the start parameter.
There’s no parameter to get more results in one response, but you can iterate via the start parameter.

AttributeError: 'NoneType' object has no attribute 'findSongBySize'

I'm new to Ubuntu (and the Python scripts that go with it) and I've been hitting this error with the iTunesToRhythm script.
Traceback (most recent call last):
File "/home/amylee/iTunesToRhythm.py", line 220, in <module>
main(sys.argv)
File "/home/amylee/iTunesToRhythm.py", line 48, in main
match = correlator.correlateSong( song, options.confirm, options.fastAndLoose, options.promptForDisambiguate )
File "/home/amylee/iTunesToRhythm.py", line 133, in correlateSong
matches = self.parser.findSongBySize( song.size );
AttributeError: 'NoneType' object has no attribute 'findSongBySize'
I understand the concept behind fixing the issue but have no idea how to go about it. I've looked at answers to similar problems but none really help me, especially since I have no clue as to what I am doing. I've included the full script below. Thanks in advance, dudes who know way more about this stuff than I do.
----iTunesToRhythm.py----
import sys
import platform
if platform.system() == "Darwin":
sys.path.append('/sw/lib/python2.5/site-packages/')
from dumpitunesmac import iTunesMacParser, iTunesMacSong
import libxml2
import linecache
from optparse import OptionParser, OptionGroup
from dumprhythm import RhythmLibraryParser, RhythmSong
from dumpitunes import iTunesLibraryParser, iTunesSong
def main(argv):
# process command line
options, args = processCommandLine(argv)
print "Reading input from " + args[0]
inputParser = getParser(args[0], options )
print "Writing to output " + args[1]
destinationParser = getParser(args[1], options )
#retrieve destination songs
allDestinationSongs = destinationParser.getSongs()
# go through each song in destination library
correlator = SongCorrelator(inputParser)
for song in allDestinationSongs:
print song.artist + " - " + song.album + " - " + song.title + " - " + str(song.size)
if song.size != None and song.size != "Unknown":
# find equivalent itunes song
match = correlator.correlateSong( song, options.confirm, options.fastAndLoose, options.promptForDisambiguate )
# update database, if match
if match != None and options.writeChanges == True:
if options.noratings == False:
song.setRating( match.rating )
print "\t\t\tRating changed to " + str( match.rating )
if options.noplaycounts == False:
song.setPlaycount( match.playcount )
print "\t\t\tPlay count changed to " + str( match.playcount )
# dump summary results
print "\nSummary\n------------------------------------"
print "manually resolved matches = " + str( correlator.manuallyResolvedMatches)
print "full matches = " + str( correlator.fullMatches )
print "partial matches = " + str( correlator.partialMatches)
print "no matches = " + str( correlator.zeroMatches )
print "unresolved ambiguous matches = " + str( correlator.ambiguousMatches )
# save
if options.writeChanges == True:
destinationParser.save()
print "Changes were written to destination"
else:
print "Changes were not written to destination \n\tuse -w to actually write changes to disk"
def getParser( file, options ):
if file == "mysql":
print "\tassuming amarok database"
return AmarokLibraryParser(options.servername, options.database, options.username, options.password )
if file == "itunes":
print "\tassuming itunes on the mac"
return iTunesMacParser()
desc = linecache.getline( file, 2)
if desc.find("Apple Computer") != -1:
#open itunes linbrary
print "\tdetected Itunes library"
return iTunesLibraryParser(file);
if desc.find("rhythmdb") != -1:
print "\tdetected Rhythm box library"
return RhythmLibraryParser(file)
def processCommandLine( argv ):
parser = OptionParser("iTunesToRhythm [options] <inputfile>|itunes|mysql <outputfile>|mysql|itunes")
parser.add_option("-c", "--confirm", action="store_true", dest="confirm", default = False, help="confirm every match" )
parser.add_option("-w", "--writechanges", action="store_true", dest="writeChanges", default = False, help="write changes to destination file" )
parser.add_option("-a", "--disambiguate", action="store_true", dest="promptForDisambiguate", default = False, help="prompt user to resolve ambiguities" )
parser.add_option("-l", "--fastandloose", action="store_true", dest= "fastAndLoose", default = False, help = "ignore differences in files name when a file size match is made against a single song. Will not resolve multiple matches" )
parser.add_option("--noplaycounts", action="store_true", dest= "noplaycounts", default = False, help = "do not update play counts" )
parser.add_option("--noratings", action="store_true", dest= "noratings", default = False, help = "do not update ratings" )
amarokGroup = OptionGroup(parser, "Amarok options", "Options for connecting to an Amarok MySQL remote database")
amarokGroup.add_option("-s", "--server", dest="servername", help = "host name of the MySQL database server")
amarokGroup.add_option("-d", "--database", dest="database", help = "database name of the amarok database")
amarokGroup.add_option("-u", "--username", dest="username", help = "login name of the amarok database")
amarokGroup.add_option("-p", "--password", dest="password", help = "password of the user")
parser.add_option_group(amarokGroup)
# parse options
options, args = parser.parse_args()
# check that files are specified
if len(args) != 2:
parser.print_help()
parser.error( "you must supply 2 file names or 1 file name and the word mysql followed by database information. Specyfing itunes will use a running instance of iTunes on the Mac" )
# make surce source & destination are not the same
if args[0] == args[1]:
parser.error("source and destination cannot be the same")
# we're ok
return options, args
class SongCorrelator:
def __init__(self, parser ):
self.parser = parser
self.zeroMatches = 0
self.fullMatches = 0
self.ambiguousMatches = 0;
self.partialMatches = 0;
self.manuallyResolvedMatches = 0;
# attempt to find matching song in database
def correlateSong( self, song, confirm, fastAndLoose, promptForDisambiguate ):
match = None
matches = self.parser.findSongBySize( song.size );
matchcount = len(matches)
# no results
if matchcount == 0:
print "\t no matches found"
self.zeroMatches = self.zeroMatches + 1
# full match
elif matchcount == 1:
match = matches[0]
if match.title == song.title:
print "\t 100% match on " + self.dumpMatch( match )
self.fullMatches = self.fullMatches + 1
else:
if fastAndLoose == False:
match = self.disambiguate( song, matches, promptForDisambiguate )
else:
print "\t 50% match on " + self.dumpMatch( match )
self.partialMatches = self.partialMatches + 1
# multiple matches
else:
print "\t multiple matches"
for match in matches:
print "\t\t " + self.dumpMatch( match )
# attempt a resolution
match = self.disambiguate( song, matches, promptForDisambiguate )
#review
if confirm == True:
foo = raw_input( 'press <enter> to continue, Ctrl-C to cancel')
#done
return match
def dumpMatch( self, match ):
return match.title + ", playcount = " + str(match.playcount) + ", rating = " + str(match.rating)
def disambiguate(self,song,matches,prompt):
# attempt to disambiguate by title
print "\t looking for secondary match on title"
titlematchcount = 0
for match in matches:
if match.title == song.title:
titlematchcount = titlematchcount + 1
latstitlematch = match
if titlematchcount == 1:
# we successfully disambiguated using the title
print "\t\t disambiguated using title"
self.fullMatches = self.fullMatches + 1
return latstitlematch
if prompt == True:
print "\t\t cannot disambiguate. Trying to match " + song.filePath
print "Please select file or press <Enter> for no match:"
numMatch = 0
for match in matches:
numMatch = numMatch + 1
print "\t\t\t\t[" + str(numMatch) + "] " + self.dumpMatch(match) + ", " + match.filePath
selection = self.inputNumber("\t\t\t\t? ", 1, len(matches) )
if selection > 0:
self.manuallyResolvedMatches = self.manuallyResolvedMatches + 1
return matches[selection - 1]
# user did not select, record ambiguity
self.ambiguousMatches = self.ambiguousMatches + 1
return None
def inputNumber(self, msg, min, max):
result = raw_input(msg)
if len(result) == 0:
return 0
try:
resultNum = int(result)
if resultNum < min or resultNum > max:
print "out of range"
return self.inputNumber( msg, min, max )
return resultNum
except:
print "invalid input"
return self.inputNumber(msg, min, max)
if __name__ == "__main__":
main(sys.argv)
I'm the original developer. I updated the script to throw an exception if the file format is not recognized (I think this is what you are running into). I also incorporated some useful patches from another user.
Please download the files again and e-mail me if you still have trouble.
Your problem appears to be that getParser is returning None, presumably because all the if conditions have failed.
Check that args[0] and options are the values that you expect them to be.
I'd suggest raising an exception at the end of the getParser method if the arguments are not valid, so that the error is raised close to the cause of the problem rather than in some unrelated code much later.
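A minimal sketch of that suggestion, added to the end of the existing getParser function (the exception type and message here are just placeholders):
def getParser(file, options):
    # ... the existing checks for "mysql", "itunes", iTunes XML and Rhythmbox files ...
    # If none of the known formats matched, fail loudly here instead of
    # implicitly returning None (which later blows up inside correlateSong).
    raise ValueError("Unrecognised library format for input: %s" % file)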

Edit ini file option values with ConfigParser (Python)

Anyone know how I'd go about editing ini file values, preferably using ConfigParser? (Even a place to start from would be great!) I've got lots of comments throughout my config file, so I'd like to keep them by editing just the values, not pulling the values out and juggling multiple files.
Structure of my config file:
[name1]
URL = http://example.com
username = dog
password = password
[name2]
URL = http://catlover.com
username = cat
password = adffa
As you can see, I've got the same options for different section names, so editing just the values for one section is a bit trickier if ConfigParser can't do it.
Thanks in advance.
Here is an example
import sys
import os.path
from ConfigParser import RawConfigParser as ConfParser
from ConfigParser import Error
p = ConfParser()
# this happend to me save as ASCII
o = open("config.ini")
if o.read().startswith("\xef\xbb\xbf"):
print "Fatal Error; Please save the file as ASCII not unicode."
sys.exit()
try:
results = p.read("config.ini")
except Error, msg:
print "Error Parsing File"
print msg
else:
if results == []:
print "Could not load config.ini."
if not os.path.exists("config.ini"):
print "config.ini does not exist."
else:
print "An uknown error occurred."
else:
print "Config Details"
sections = p.sections()
sections.sort()
for s in sections:
print "------------------------"
print s
if p.has_option(s, "URL"):
print "URL: ",
print p.get(s, "URL")
else:
print "URL: No Entry"
if p.has_option(s, "username"):
print "User: ",
print p.get(s, "username")
else:
print "User: N/A"
if p.has_option(s, "password"):
print "Password: ",
print p.get(s, "password")
else:
print "Password: N/A"
Also, I created this class to store my app's variables and make config writing easier. It was originally used with Twisted, but I created a simple replacement logger:
import os.path
import sys
#from twisted.python import log
import ConfigParser
from traceback import print_last
class Log(object):
    def msg(self, t):
        print "Logger: %s " % t
    def err(self, t = None):
        print "-------------Error-----------"
        print "\n\n"
        if t is None:
            print_last()
# sloppy replacement for twisted's logging functions
log = Log()
class Settings(object):
'''Stores settings'''
config_variables = ['variables_that_should_be_stored_in_config']
def __init__(self, main_folder = None, log_file = None, music_folder = None ):
# load the defaults then see if there are updates ones in the config
self.load_defaults()
self.config = ConfigParser.RawConfigParser()
if len(self.config.read(self.settings_file)) == 1:
if 'Settings' in self.config.sections():
try:
self.music_folder = self.config.get('Settings', 'music_folder')
except ConfigParser.NoOptionError:
pass
log.msg('Music Folder: %s' % self.music_folder)
try:
self.mplayer = self.config.get('Settings', 'mplayer')
except ConfigParser.NoOptionError:
pass
try:
self.eula = self.config.getboolean('Settings', 'eula')
except ConfigParser.NoOptionError:
pass
else:
log.msg('No Settings Section; Defaults Loaded')
else:
log.msg('Settings at default')
def load_defaults(self):
log.msg('Loading Defaults')
self.main_folder = os.path.dirname(os.path.abspath(sys.argv[0]))
self.settings_file = os.path.join(self.main_folder, 'settings.cfg')
self.log_file = os.path.join(self.main_folder, 'grooveshark.log')
self.music_folder = os.path.join(self.main_folder, 'Music')
self.grooveshark_started = False
self.eula = False
self.download_percent = 0.5# default buffer percent is 50 %
if sys.platform == 'win32' or sys.platform == 'cygwin':# Windows
if os.path.exists( os.path.join(self.main_folder, 'mplayer', 'mplayer.exe') ):
self.mplayer = os.path.join(self.main_folder, 'mplayer', 'mplayer.exe')
elif os.path.exists( os.path.join(self.main_folder, '/mplayer.exe') ):
self.mplayer = os.path.join(self.main_folder, '/mplayer.exe')
else:
self.mplayer = 'download'
elif sys.platform == 'darwin':# Mac
if os.path.exists( os.path.join(self.main_folder, 'mplayer/mplayer.app') ):
self.mplayer = os.path.join(self.main_folder, 'mplayer/mplayer.app')
elif os.path.exists( os.path.join(self.main_folder, '/mplayer.app') ):
self.mplayer = os.path.join(self.main_folder, '/mplayer.app')
else:
self.mplayer = 'download'
else:# linux
# download or navigate to it
self.mplayer = 'download'
# Create Music Folder if it does not exist
if not os.path.exists(self.music_folder):
os.makedirs(self.music_folder)
# Create log file if it does not exist
if not os.path.exists(self.log_file):
l = open(self.log_file, 'wb')
l.close()
log.msg('Application Folder: %s' % self.main_folder)
log.msg('Log File: %s' % self.log_file)
log.msg('Music Folder: %s' % self.music_folder)
def __setattr__(self, variable, value):
log.msg('Setting %s to %s' % (variable, value))
object.__setattr__(self, variable, value)
if variable in self.config_variables:
try:
self.config.set('Settings', variable, value)
except:
# Means config wasn't created then, could be we were trying to set self.config (in which case self.config wasn't set yet because we were trying to set it)
log.err()
else:
# UPDATE settings file
log.msg('Saving Settings to %s' % (self.settings_file))
try:
self.config.write( open(self.settings_file, 'wb') )
except:
log.err()
