I have the following files in a Python pipeline:
#in_para.py
nx = 31
c = 1

#solver.py
import numpy
import os

def simple(output):
    ny = nx + 5
    c_2 = c + 2
    with open(os.path.join(output, 'Output.txt'), 'w') as f:
        print("Hello stackoverflow!", file=f)
        print("I have a question.", file=f)

if __name__=='__main__':
    simple()
#main.py
import os
import numpy
import argparse
from solver import simple

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-C','--Chk', type=str, help='Choose arg')
    parser.add_argument('-O','--output', type=str, default="./Output", help='Provide the output directory')
    args = vars(parser.parse_args())
    output = args['output']
    if not os.path.exists(output):
        os.makedirs(output)
    if args['Chk'] == 'compo1':
        simple(output)

if __name__=='__main__':
    main()
I would like to pass the input file in_para.py through a command-line argument, such that
python3 main.py -I in_para -C compo1 -O Output_dir
gives me the desired output:
this is a simple test 36
this is a simple test2 3
I know that from in_para import * would solve the problem (in a different manner), but I would like to pass the input parameters from the command line as a positional argument and hand them on to solver.py. I haven't been able to find an example in the documentation similar to the above task. Besides, the above is just an example; in_para.py and solver.py have several input parameters and several lines of code, so I don't want the user of main.py to have to go into either file and modify it.
Change the implementation in solver.py:
def simple(nx, c, output): # provide parameters as function arguments
    ny = nx + 5
    c_2 = c + 2
    print("this is a simple test", ny)
    print("this is a simple test2", c_2)
    with open(os.path.join(output, 'Output.txt'), 'w') as f:
        print("Hello stackoverflow!", file=f)
        print("I have a question.", file=f)
In main.py, parse the parameters file:
... # other imports
import json

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-C','--Chk', type=str, help='Choose arg')
    parser.add_argument('-I','--input', type=argparse.FileType("r"), help='Input parameters')
    parser.add_argument('-O','--output', type=str, help='output dir')
    args = parser.parse_args()

    output = args.output
    # implement any way of parsing the input file, e.g. a json file
    params = json.load(args.input)

    ... # do something else with the arguments

    if args.Chk == 'compo1':
        simple(output=output, **params)

    ... # further code lines
Your input file would then look like:
{"nx": 31, "c": 1}
Comment on edit:
Either you use args = vars(parser.parse_args()), which results in a dictionary, or you just use args = parser.parse_args() and select arguments by attribute. In your original post you used the first approach, which I adopted in the first version of this answer; however, I would prefer not to use vars.
Note that the code snippet that creates the output directory does not add value to the actual question of the post, so I removed it to avoid any distraction and added a placeholder instead.
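Putting the pieces together, here is a minimal end-to-end sketch of main.py, assuming the parameters live in a JSON file and that simple is imported from solver.py; the option names mirror the ones above:
# main.py -- minimal sketch, assuming solver.simple(nx, c, output) as defined above
import argparse
import json
import os

from solver import simple

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-C', '--Chk', type=str, help='Choose arg')
    parser.add_argument('-I', '--input', type=argparse.FileType('r'), help='Input parameters (JSON file)')
    parser.add_argument('-O', '--output', type=str, default='./Output', help='Output directory')
    args = parser.parse_args()

    os.makedirs(args.output, exist_ok=True)   # create the output directory if needed
    params = json.load(args.input)            # e.g. {"nx": 31, "c": 1}

    if args.Chk == 'compo1':
        simple(output=args.output, **params)  # unpack the parameters as keyword arguments

if __name__ == '__main__':
    main()
Called as python3 main.py -I in_para.json -C compo1 -O Output_dir, this prints the desired "this is a simple test 36" and "this is a simple test2 3" and writes Output.txt into the given directory.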
Related
I have certain data in a JSON file (say, example.json).
example.json
{
    "name": "Williams",
    "working": false,
    "college": ["NYU", "SU", "OU"],
    "NYU": {
        "student": true,
        "professor": false,
        "years": {
            "fresher": "1",
            "sophomore": "2",
            "final": "3"
        }
    }
}
I wish to write code wherein I can give the arguments on the command line. Suppose the script is saved in a file script.py; then:
If I enter $ python3 script.py --get name --get NYU.student, it outputs
name=Williams
NYU.student=True
If I enter $ python3 script.py --set name=Tom --set NYU.student=False, it updates the name and NYU.student keys in the dictionary to Tom and False and outputs name=Tom and NYU.student=False on the command line.
I have tried the following code for the Python script (i.e. script.py):
script.py
import json
import pprint
import argparse

if __name__== "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--get", help="first command")
    parser.add_argument("--set", help="second command")
    args = parser.parse_args()

    with open('example.json','r') as read_file:
        data = json.load(read_file)

    if args.set == None:
        key = ' '.join(args.get[:])
        path = key.split('.')
        now = data
        for k in path:
            if k in now:
                now = now[k]
            else:
                print('Error: Invalid Key')
        print(now)

    elif args.get == Null:
        key, value = ' '.join(args.set[:]).split('=')
        path = key.split('.')
        now = data
        for k in path[:-1]:
            if k in now:
                now = now[k]
            else:
                print('Error: Invalid Key')
        now[path[-1]] = value

    with open('example.json','w') as write_file: # to write the updated data back to the same file
        json.dump(data, write_file, indent=2)
However, my script is not working as I expect it to. Kindly help me with the script.
Your code has the following issues:
When joining the argument values (key = ' '.join(args.get[:]) and the corresponding line in the --set branch), you use a space as the separator. This inserts spaces between the characters of the key and leads to the "Error: Invalid Key" message. Removing the space will solve the issue:
key = ''.join(arg[:])
You defined the arguments to accept only a single value, not multiple. Therefore, even if you pass multiple --get or --set options, the script only sees the last one. Adding action="append" to the two add_argument calls will solve the issue:
parser.add_argument("--get", help="first command", action="append")
parser.add_argument("--set", help="second command", action="append")
Full code:
import json
import pprint
import argparse

if __name__== "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--get", help="first command", action="append")
    parser.add_argument("--set", help="second command", action="append")
    args = parser.parse_args()

    try:
        with open('example.json','r') as read_file:
            data = json.load(read_file)
    except IOError:
        print("ERROR: File not found")
        exit()

    if args.set == None:
        for arg in args.get:
            key = ''.join(arg[:])
            path = key.split('.')
            now = data
            for k in path:
                if k in now:
                    now = now[k]
                else:
                    print('Error: Invalid Key')
            print(f"{arg} = {now}")

    elif args.get == None:
        for arg in args.set:
            key, value = ''.join(arg[:]).split('=')
            path = key.split('.')
            now = data
            for k in path[:-1]:
                if k in now:
                    now = now[k]
                else:
                    print('Error: Invalid Key')
            print(f"{arg}")
            now[path[-1]] = value

    with open('example.json','w') as write_file: # to write the updated data back to the same file
        json.dump(data, write_file, indent=2)
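With the example.json from the question, the fixed script should behave roughly like this (a sketch; the set run stores the values as strings and writes them back to the file):
$ python3 script.py --get name --get NYU.student
name = Williams
NYU.student = True

$ python3 script.py --set name=Tom --set NYU.student=False
name=Tom
NYU.student=False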
Here is the get part of the question; I hope that you can continue with the set part of your assignment (a possible starting point is sketched after the code below). Good luck.
python test.py --get name NYU.student
import json
import pprint
import argparse

def match(data: dict, filter: str):
    current = data
    for f in filter.split("."):
        if f not in current:
            return False
        current = current[f]
    return current == True

if __name__== "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--get", nargs="*", help="first command")
    args = parser.parse_args()

    with open('example.json','r') as f:
        data = json.loads(f.read())

    if args.get is not None and len(args.get) == 2:
        attr_name = args.get[0]
        if match(data, args.get[1]):
            print("{}={}".format(attr_name, data[attr_name]))
In order to pass arguments on the command line, you can make use of the sys module in Python 3. The sys module exposes the command-line arguments as a list of strings, sys.argv. The first element in the list is always the name of the script, and the subsequent elements are arg1, arg2, and so on.
Hopefully the following example helps to illustrate the usage of the sys module.
Example command:
python filename.py 1 thisisargument2 4
The corresponding code
import sys
# Note that all the command line args will be treated as strings
# Thus type casting will be needed for proper usage
print(sys.argv[0])
print(sys.argv[1])
print(sys.argv[2])
print(sys.argv[3])
Corresponding Output
filename.py
1
thisisargument2
4
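Since every entry in sys.argv is a string, numeric arguments need an explicit cast before they can be used as numbers, for example:
# cast the numeric command-line arguments before doing arithmetic with them
first = int(sys.argv[1])   # "1" -> 1
third = int(sys.argv[3])   # "4" -> 4
print(first + third)       # prints 5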
Also, please make a thorough Google search before posting a question on Stack Overflow.
I am trying to parse command-line arguments and I have never done it before. I have tried a few things; the code below is the last thing that I tried, but I received the error "unrecognized arguments". I want to be able to put something like
copy-column-csv.py cname=Quiz source=source.csv target=target.csv out=out.csv
on the command line, but also have the ability to give the arguments in any order. I am not sure how to go about this. Below is my code:
import argparse
import sys, re
import numpy as np
import smtplib
from random import randint
import csv
import math
import pandas as pd
parser = argparse.ArgumentParser()
parser.add_argument('-cname')
parser.add_argument('-source')
parser.add_argument('-target')
parser.add_argument('-out')
args = parser.parse_args()
#col = sys.argv[1]
#source = sys.argv[2]
#target = sys.argv[3]
#newtarg = sys.argv[4]
sourceFile = pd.read_csv(source)
targetFile = pd.read_csv(target)
del targetFile[cname]
targetFile[col] = pd.Series(sourceFile[col])
targetFile.to_csv(out, index = False)
Assuming well-formed arguments, you could split sys.argv up into a dictionary:
args_dict = {}
for arg in sys.argv[1:]:
split = arg.split('=')
args_dict[split[0]] = split[1]
args_dict will look like this:
{'cname': 'Quiz',
'out': 'out.csv',
'source': 'source.csv',
'target': 'target.csv'}
And you can access the elements like so:
print(args_dict['cname'])
print(args_dict['out'])
print(args_dict['source'])
print(args_dict['target'])
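Plugged into your pandas code, that could look roughly like the following sketch (assuming the cname/source/target/out keys from your example command; the column assignment simply overwrites any existing column of that name):
import sys
import pandas as pd

# split key=value pairs from the command line into a dictionary
args_dict = {}
for arg in sys.argv[1:]:
    key, value = arg.split('=', 1)
    args_dict[key] = value

sourceFile = pd.read_csv(args_dict['source'])
targetFile = pd.read_csv(args_dict['target'])

col = args_dict['cname']
targetFile[col] = pd.Series(sourceFile[col])   # copy the column from source to target
targetFile.to_csv(args_dict['out'], index=False)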
Here is an example of how I'm doing it.
import sys
from optparse import OptionParser
from optparse import OptionGroup

usage = "%prog --genome FILE --anno FILE --output PREFIX"   # usage string shown by -h
parser = OptionParser(usage)

required = OptionGroup(parser, "Required Arguments")
required.add_option("--genome", dest="genome_file", help="File representing genome. FASTA nucleotide format.")
required.add_option("--anno", dest="anno_file", help="File containing genome annotation information in GTF/GFF3 format.")
required.add_option("--output", dest="prefix", help="Creates a folder named with the supplied prefix containing output files.")
parser.add_option_group(required)

(options, args) = parser.parse_args()

if len(args) != 0 or not options.genome_file or not options.anno_file or not options.prefix:
    parser.error("Required arguments have not been supplied. \n\t\tUse -h to get more information.")
    sys.exit()
You'll pass the arguments when you run it, for example, with --genome=myfile.txt, and in the code that value becomes options.genome_file.
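A full invocation could then look like this (hypothetical script and file names):
python annotate.py --genome=genome.fa --anno=annotation.gtf --output=results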
Can I configure Python to have MATLAB-like printing, so that when I just have a function call like
returnObject()
it simply prints that object without me having to wrap it in print? I assume this is not easy, but the idea is that if an object does not get bound to some other variable it should get printed, so that this would work:
a = 5 #prints nothing
b = getObject() #prints nothing
a #prints 5
b #prints getObject()
getObject() #prints the object
If you use an IPython notebook, individual cells work like this. But you can only view one object per cell by typing the object's name. To see multiple objects you'd need to call print, or use lots of cells.
You could write a script to modify the original script based on a set of rules that define what to print, then run the modified script.
A basic script to do this would be:
f = open('main.py', 'r')
p = open('modified.py', 'w')
p.write('def main(): \n')
for line in f:
    temp = line
    # wrap lines that consist of a single bare name in a print() call
    if temp.strip().isidentifier():
        temp = 'print(' + temp.strip() + ')\n'
    p.write('\t' + temp)
p.close()
f.close()

from modified import main
main()
The script main.py would then look like this:
x = 236
x
output:
236
The idea is as follows: parse the AST of the Python code, replace every bare expression statement with a call to print that takes the expression as its argument, and then run the modified version. I'm not sure whether it works with every piece of code, but you might try. Save it as matlab.py and run your code as python3 -m matlab file.py.
#!/usr/bin/env python3
import ast
import os
import sys

class PrintAdder(ast.NodeTransformer):

    def add_print(self, node):
        print_func = ast.Name("print", ast.Load())
        print_call = ast.Call(print_func, [node.value], [])
        print_statement = ast.Expr(print_call)
        return print_statement

    def visit_Expr(self, node):
        # leave existing print(...) calls untouched
        if isinstance(node.value, ast.Call) and isinstance(node.value.func, ast.Name) and node.value.func.id == 'print':
            return node
        # wrap every other bare expression statement in a print() call
        return self.add_print(node)

def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('infile', type=argparse.FileType(), nargs='?', default='-')
    args = parser.parse_args()

    with args.infile as infile:
        code = infile.read()
        file_name = args.infile.name

    tree = ast.parse(code, file_name, 'exec')
    tree = PrintAdder().visit(tree)
    tree = ast.fix_missing_locations(tree)

    bytecode = compile(tree, file_name, 'exec')
    exec(bytecode)

if __name__ == '__main__':
    main()
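As a quick check, with the earlier two-line example saved in a file (here called file.py just for illustration), the transformer behaves as expected:
$ cat file.py
x = 236
x

$ python3 -m matlab file.py
236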
I need to overwrite a Python dictionary's parameters with values from a JSON file passed through a command-line argument parser. The JSON file is located in the current working directory, but its name can be dynamic, so I want something like below:
python python_script --infile json_file
python_script:
if __name__ == "__main__":
    profileInfo = dict()
    profileInfo['profile'] = "enterprisemixed"
    profileInfo['nodesPerLan'] = 50
json_file:
{
"profile":"adhoc",
"nodesPerLan" : 4
}
I tried to add the following lines, but I don't know how to load this JSON data into the Python dictionary:
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--infile', nargs = 1, help="JSON file to be processed",type=argparse.FileType('r'))
arguments = parser.parse_args()
Read the JSON file with the name given to --infile and update your profileInfo:
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--infile', nargs=1,
help="JSON file to be processed",
type=argparse.FileType('r'))
arguments = parser.parse_args()
# Loading a JSON object returns a dict.
d = json.load(arguments.infile[0])
profileInfo = {}
profileInfo['profile'] = "enterprisemixed"
profileInfo['nodesPerLan'] = 50
print(profileInfo)
# Overwrite the profileInfo dict
profileInfo.update(d)
print(profileInfo)
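Run against the JSON file from the question (here assumed to be saved as json_file.json), the two print calls show the dictionary before and after the overwrite:
$ python python_script.py --infile json_file.json
{'profile': 'enterprisemixed', 'nodesPerLan': 50}
{'profile': 'adhoc', 'nodesPerLan': 4}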
I have a Python script which is controlled by a config file called system.config. The structure of the config file is like below, with some default values:
[company]
companyname: XYZ
[profile]
name: ABC
joining: 1/1/2014
The code for reading the config file is config_parser_details.py:
import ConfigParser
import sys

Config = ConfigParser.ConfigParser()
Config.read("system.config")
filename = "system.config"

def ConfigSectionMap(section):
    dict1 = {}
    options = Config.options(section)
    for option in options:
        try:
            dict1[option] = Config.get(section, option)
            if dict1[option] == -1:
                DebugPrint("skip: %s" % option)
        except:
            print("exception on %s!" % option)
            dict1[option] = None
    return dict1

company = ConfigSectionMap("company")['companyname']
name = ConfigSectionMap("profile")['name']
joindate = ConfigSectionMap("profile")['joining']
Now the code for my script is test.py:
import config_parser_details as p
import sys
import warnings
import os
company = p.company
name = p.name
date = p.joindate
print("%s\n" %company)
print("%s\n" %name)
The output is:
XYZ
ABC
Now I want to be able to override the config-file input through the command line, like
python test.py --compname="testing"
If any argument is missing on the command line, then the default value from the config file will be used.
You could use the argparse library to parse command-line arguments.
Your test.py file would then look like below:
import config_parser_details as p
import sys
import warnings
import os
import argparse
commandLineArgumentParser = argparse.ArgumentParser()
commandLineArgumentParser.add_argument("-c", "--compname", help="Company name", default=p.company)
commandLineArguments = commandLineArgumentParser.parse_args()
company = commandLineArguments.compname
name = p.name
date = p.joindate
print("%s\n" %company)
print("%s\n" %name)
I'd advise looking into a tool like docopt.
For a quick fix though, you can try doing this
def ConfigSectionMap(section):
    # load the default options from the config file into a dict
    options = {}
    for option in Config.options(section):
        options[option] = Config.get(section, option)
    # collect --key=value pairs from the command line
    arg_dict = {}
    for command_line_argument in sys.argv[1:]:
        arg = command_line_argument.split("=")
        arg_dict[arg[0][2:]] = arg[1]   # strip the leading "--" from the key
    # command-line values override or add to the defaults
    for key in arg_dict:
        options[key] = arg_dict[key]
    return options
This will load up all the default options. Any options put on the command line will override or add to the options dict. Note that with this quick fix the command-line key has to match the option name in the config file (see the example below).
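For example, with the quick fix applied in config_parser_details.py, overriding the company name from test.py would use the option name from the config file (companyname) as the key:
$ python test.py --companyname=testing
testing

ABC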
First of all, I'd move code into a main section so that you can import config_parser_details without executing code:
def main():
    Config = ConfigParser.ConfigParser()
    Config.read("system.config")
    filename = "system.config"
    company = ConfigSectionMap("company")['companyname']
    name = ConfigSectionMap("profile")['name']
    joindate = ConfigSectionMap("profile")['joining']

if __name__ == '__main__':
    main()
Secondly, I'd use STB land's suggestion of parsing the command line with argparse, something like:
def main():
    # do the parsing thing first, then:
    filename = args.filename
    do_stuff(filename)
This way you can neatly use Python's own unit-test framework or nosetests to write test files that don't require you to manually specify parameters:
def test_basic():
    # create a temporary file with tempfile.NamedTemporaryFile
    tmpfile = tempfile.NamedTemporaryFile()
    # add test data to tmpfile
    do_stuff(tmpfile)
    # check the output
    assert ....
This comes with the added benefit of not having global variables, which will complicate your life later.