According to Microsoft, DPAPI should be able to encrypt data on one machine, and decrypt it on another:
See: https://support.microsoft.com/en-us/topic/bf374083-626f-3446-2a9d-3f6077723a60#bkmk_6
When I am logged into a domain controller and encrypt a file, I expect to be able to log out, transfer the file, and decrypt it on another machine while logged in as the same user.
However, I get this error:
error: (-2146893813, 'CryptProtectData', 'Key not valid for use in specified state.')
This implies that the "roaming" didn't work. I'm assuming there are some Group Policy settings I need to configure to get those credentials to roam properly.
Also, if there's a better way to do this (some other API that uses the logged-in user's existing credentials), I'm OK with that.
Here's the script I use to test:
import argparse
import os
import sys
from win32crypt import CryptProtectData, CryptUnprotectData
def dpapi_encrypt(fin, fout):
dat = fin.read()
fout.write(CryptProtectData(dat))
def dpapi_decrypt(fin, fout):
(_descr, dat) = CryptUnprotectData(fin.read())
if dat and dat[-1] == 0:
dat = dat[:-1]
fout.write(dat)
def do_fileop(file, op):
if file == "-":
fin = sys.stdin.buffer
fout = sys.stdout.buffer
op(fin, fout)
else:
with open(file, "rb") as fin:
tmp = file + ".dpapi-enc"
with open(tmp, "wb") as fout:
op(fin, fout)
os.replace(tmp, file)
def encrypt_file(file):
do_fileop(file, dpapi_encrypt)
def decrypt_file(file):
do_fileop(file, dpapi_decrypt)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("file")
parser.add_argument("--encrypt", "-e", action="store_true")
parser.add_argument("--decrypt", "-d", action="store_true")
args = parser.parse_args()
if args.encrypt:
encrypt_file(args.file)
elif args.decrypt:
decrypt_file(args.file)
else:
print("error: specify --encrypt or --decrypt", file=sys.stdout)
if __name__ == "__main__":
main()
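For reference, I exercise it roughly like this (the script name dpapi_test.py and secret.txt are placeholders): encrypt on the first machine, copy the file over, then decrypt on the second machine while logged in as the same domain user.
python dpapi_test.py --encrypt secret.txt
python dpapi_test.py --decrypt secret.txt
The second step, on the other machine, is where the error above appears.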
Related
I have certain data in a JSON file (say, example.json).
example.json
{
  "name": "Williams",
  "working": false,
  "college": ["NYU", "SU", "OU"],
  "NYU": {
    "student": true,
    "professor": false,
    "years": {
      "fresher": "1",
      "sophomore": "2",
      "final": "3"
    }
  }
}
I wish to write a script to which I can pass arguments on the command line. Suppose the script is saved in a file 'script.py'; then:
In the terminal, if I enter $ python3 script.py --get name --get NYU.student then it outputs:
name=Williams
NYU.student=True
If I enter $ python3 script.py --set name=Tom --set NYU.student=False
then it updates the name and NYU.student keys in the dictionary to Tom and False and outputs name=Tom and NYU.student=False on the command line.
I have tried the following code for the python script (i.e. script.py)
script.py
import json
import pprint
import argparse
if __name__== "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--get", help="first command")
parser.add_argument("--set", help="second command")
args=parser.parse_args()
with open('example.json','r') as read_file:
data=json.load(read_file)
if args.set == None:
key = ' '.join(args.get[:])
path = key.split('.')
now = data
for k in path:
if k in now:
now = now[k]
else:
print('Error: Invalid Key')
print(now)
elif args.get == Null:
key, value = ' '.join(args.set[:]).split('=')
path = key.split('.')
now = data
for k in path[:-1]:
if k in now:
now = now[k]
else:
print('Error: Invalid Key')
now[path[-1]] = value
with open('example.json','w') as write_file: #To write the updated data back to the same file
json.dump(data,write_file,indent=2)
However, my script is not working as I expect it to. Kindly help me with the script.
Your code has the following issues:
When joining the argument values (the ' '.join(...) calls in the --get and --set branches), you use a space as the separator. This puts a space between every character of the key, so the lookup fails and you get the 'Invalid Key' error. Removing the space solves the issue:
key = ''.join(arg[:])
You defined the arguments to accept only a single value, not multiple, so even if you pass --get or --set several times, the script only sees the last one. Adding action="append" to both add_argument calls solves the issue:
parser.add_argument("--get", help="first command", action="append")
parser.add_argument("--set", help="second command", action="append")
Full code:
import json
import pprint
import argparse
if __name__== "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--get", help="first command", action="append")
parser.add_argument("--set", help="second command", action="append")
args=parser.parse_args()
try:
with open('example.json','r') as read_file:
data=json.load(read_file)
except IOError:
print("ERROR: File not found")
exit()
if args.set == None:
for arg in args.get:
key = ''.join(arg[:])
path = key.split('.')
now = data
for k in path:
if k in now:
now = now[k]
else:
print('Error: Invalid Key')
print(f"{arg} = {now}")
elif args.get == None:
for arg in args.set:
key, value = ''.join(arg[:]).split('=')
path = key.split('.')
now = data
for k in path[:-1]:
if k in now:
now = now[k]
else:
print('Error: Invalid Key')
print(f"{arg}")
now[path[-1]] = value
with open('example.json','w') as write_file: #To write the updated data back to the same file
json.dump(data,write_file,indent=2)
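Assuming the script is saved as script.py next to example.json, a run then looks roughly like this (the exact values depend on what is currently in the file):
python3 script.py --get name --get NYU.student
name = Williams
NYU.student = True
python3 script.py --set name=Tom --set NYU.student=False
name=Tom
NYU.student=False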
Here is the --get part of the question; I hope you can continue with the --set part of your assignment. Good luck.
python test.py --get name NYU.student
import json
import pprint
import argparse
def match(data: dict, filter: str):
current = data
for f in filter.split("."):
if f not in current:
return False
current = current[f]
return current == True
if __name__== "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--get", nargs="*", help="first command")
args = parser.parse_args()
with open('example.json','r') as f:
data = json.loads(f.read())
if args.get is not None and len(args.get) == 2:
attr_name = args.get[0]
if match(data, args.get[1]):
print("{}={}".format(attr_name, data[attr_name]))
To pass arguments on the command line, you can make use of the sys module. sys.argv holds the command-line arguments as a list of strings: the first element is always the name of the script file, and the subsequent elements are arg1, arg2, and so on.
I hope the following example helps illustrate how to use it.
Example Command :
python filename.py 1 thisisargument2 4
The corresponding code
import sys
# Note that all the command line args will be treated as strings
# Thus type casting will be needed for proper usage
print(sys.argv[0])
print(sys.argv[1])
print(sys.argv[2])
print(sys.argv[3])
Corresponding Output
filename.py
1
thisisargument2
4
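Since every element of sys.argv is a string, cast numeric arguments explicitly before doing arithmetic with them; for the example command above, something like:
import sys

# "1" and "4" arrive as the strings sys.argv[1] and sys.argv[3]
total = int(sys.argv[1]) + int(sys.argv[3])
print(total)  # prints 5 for the example command above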
Also, please make a thorough Google search before posting a question on Stack Overflow.
The problem is that the script never takes the branch that loads the JSON data from the file, and I do not understand why; it always goes through creating a new file each time.
import argparse
import os
import tempfile
import json
storage = argparse.ArgumentParser()
storage.add_argument("--key", help="input key's name")
storage.add_argument("--val", help="value of key", default=None)
args = storage.parse_args()
storage_path = os.path.join(tempfile.gettempdir(), 'storage.data')
with open(storage_path,'r') as f:
if f.seek(2) is not 2:
data_base = json.load(f)
print('loaded that: ',data_base)
else:
f.close()
print('each time I am creating the new one')
with open(storage_path,'w') as f:
data_base = {}
f.close()
if data_base.get(args.key, 'Not found') == 'Not found':
if args.val is not None:
data_base.setdefault(args.key, args.val)
with open(storage_path, 'w') as f:
json.dump(data_base, f)
print('dumped this: ',data_base)
There are quite a few issues with your code, i.e.:
program crashing if the file does not exist:
with open(storage_path,'r') as f:
opening storage_path for writing but actually not writing anything:
print('each time I am creating the new one')
with open(storage_path,'w') as f:
data_base = {}
f.close()
And even if the f.seek(2) test did send you into the json.load(f) branch, it would crash anyway: at that point you have moved the file pointer to the third character, so the subsequent read inside json.load() would not see the whole content.
Here's a fixed version that should work AFAICT:
import argparse
import os
import tempfile
import json
storage = argparse.ArgumentParser()
storage.add_argument("--key", help="input key's name")
storage.add_argument("--val", help="value of key", default=None)
args = storage.parse_args()
storage_path = os.path.join(tempfile.gettempdir(), 'storage.data')
data_base = None
if os.path.exists(storage_path):
with open(storage_path,'r') as f:
try:
data_base = json.load(f)
print('loaded that: ',data_base)
except Exception as e:
print("got %s on json.load()" % e)
if data_base is None:
print('each time I am creating the new one')
data_base = {}
with open(storage_path,'w') as f:
json.dump(data_base, f)
# don't prevent the user from setting "Not found" as a value; it might
# be a legitimate value.
# NB: you don't check whether `args.key` is actually set... maybe you should?
sentinel = object()
if data_base.get(args.key, sentinel) is sentinel:
if args.val is not None:
data_base[args.key] = args.val
with open(storage_path, 'w') as f:
json.dump(data_base, f)
print('dumped this: ',data_base)
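Assuming the fixed script is saved as storage.py (the key and value below are arbitrary examples), the first call creates the file in the temp directory and stores the pair, and the second call loads it back and prints the stored dictionary:
python storage.py --key name --val Tom
python storage.py --key name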
I have a Python script which is controlled by a config file called system.config. The structure of the config file is as below, with some default values:
[company]
companyname: XYZ
[profile]
name: ABC
joining: 1/1/2014
The code that reads the config file is config_parser_details.py:
import ConfigParser
import sys
Config = ConfigParser.ConfigParser()
Config.read("system.config")
filename = "system.config"
def ConfigSectionMap(section):
dict1 = {}
options = Config.options(section)
for option in options:
try:
dict1[option] = Config.get(section, option)
if dict1[option] == -1:
DebugPrint("skip: %s" % option)
except:
print("exception on %s!" % option)
dict1[option] = None
return dict1
company = ConfigSectionMap("company")['companyname']
name = ConfigSectionMap("profile")['name']
joindate = ConfigSectionMap("profile")['joining']
Now the code for my script, test.py, is:
import config_parser_details as p
import sys
import warnings
import os
company = p.company
name = p.name
date = p.joindate
print("%s\n" %company)
print("%s\n" %name)
The output is:
XYZ
ABC
Now I want to be able to supply these config values through the command line, like:
python test.py --compname="testing"
If any argument is missing on the command line, then the default value from the config file should be used.
You could use the argparse library to parse command-line arguments. Your test.py file would then look like this:
import config_parser_details as p
import sys
import warnings
import os
import argparse
commandLineArgumentParser = argparse.ArgumentParser()
commandLineArgumentParser.add_argument("-c", "--compname", help="Company name", default=p.company)
commandLineArguments = commandLineArgumentParser.parse_args()
company = commandLineArguments.compname
name = p.name
date = p.joindate
print("%s\n" %company)
print("%s\n" %name)
I'd advise looking into a tool like docopt.
For a quick fix though, you can try doing this
def ConfigSectionMap(section):
    # start from the defaults that are in the config file
    options = {option: Config.get(section, option) for option in Config.options(section)}
    arg_dict = {}
    for command_line_argument in sys.argv[1:]:
        arg = command_line_argument.split("=")
        arg_dict[arg[0][2:]] = arg[1]
    for key in arg_dict:
        options[key] = arg_dict[key]
    return options
This will load all the default options; any options given on the command line will override or add to the options dict.
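For example, with the sample system.config above (note that the flag has to match the option name in the file, because the key is taken from whatever follows the leading -- on the command line):
python test.py --companyname=testing
makes ConfigSectionMap("company") return {'companyname': 'testing'} instead of the default {'companyname': 'XYZ'}.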
First of all, I'd move code into a main section so that you can import config_parser_details without executing code:
def main():
    Config = ConfigParser.ConfigParser()
    Config.read("system.config")
    filename = "system.config"
    company = ConfigSectionMap("company")['companyname']
    name = ConfigSectionMap("profile")['name']
    joindate = ConfigSectionMap("profile")['joining']

if __name__ == '__main__':
    main()
Secondly, I'd use STB land's suggestion of parsing the command line with argparse, something like:
def main():
    # do the parsing thing first, then:
    filename = args.filename
    do_stuff(filename)
This way you can neatly use Python's own unit test framework or nosetests to write test files that don't require you to manually specify parameters:
def test_basic():
    # create a temporary file with tempfile.NamedTemporaryFile
    tmpfile = tempfile.NamedTemporaryFile()
    # add test data to tmpfile
    do_stuff(tmpfile)
    # check the output
    assert ....
This comes with the added benefit of not having global variables, which will complicate your life later.
I create a dictionary and save it to a file using json. The code takes input and updates the dictionary regularly, but unfortunately I can't get it to write the dictionary properly.
Following is the code I have written; have a look:
import os, sys, pickle, re, json
from optparse import OptionParser
parser = OptionParser("Store Daily Intakes \n python [Options] <-h help>")
parser.add_option("-n", "--ndays", dest="ndays", action="store", type="int", help="Input the day")
parser.add_option("-m", "--morning", dest="morning", action="store", type="string", help="Input the morning intake format <Banana-1pc,Bread-1pc,CottageChees-2tbs>")
parser.add_option("-l", "--lunch", dest="lunch", action="store", type="string", help="Input the Lunch intake format <Rice-2tbs,Roti-1pc,ChickenCurry-2tbs,Dal-2tbs>")
parser.add_option("-a", "--afternoon", dest="afternoon", action="store", type="string", help="Input the afternoon intake format <Cornflakes-2tbs,Banana-1pc>")
parser.add_option("-d", "--dinner", dest="dinner", action="store", type="string", help="Input the dinner intake format <Pasta-20gms, Cheese-2slice>")
(options, args) = parser.parse_args()
if options.ndays is None or options.morning is None or options.lunch is None or options.afternoon is None or options.dinner is None :
print parser.print_help()
exit(-1)
if os.path.isfile("./DailyInTakeFile.json") is True :
jout = file('./DailyInTakeFile.json','r') # read mode
CurDct = json.load(jout)
print CurDct
DailyInTake = dict()
DailyInTake["%d" % options.ndays] = {}
din = DailyInTake["%s" % options.ndays]
din['Morning'] = options.morning
din['Lunch'] = options.lunch
din['Afternoon'] = options.afternoon
din['Dinner'] = options.dinner
saved = sys.stdout
ofile = file('DailyInTakeFile.json', 'a') # append mode
for idx in CurDct.keys() :
if int(idx) == options.ndays :
print idx, options.ndays
print "The Intake for day # %d exists" %options.ndays
print "Are you sure you want to overwrite: Type [yes/no]"
lett=sys.stdin.read()
if "yes" in lett :
CurDct[idx]['Morning'] = options.morning
CurDct[idx]['Lunch'] = options.lunch
CurDct[idx]['Afternoon'] = options.afternoon
CurDct[idx]['Dinner'] = options.dinner
ofile.close()
sys.exit("Exiting after updating day # %d" % options.ndays)
else :
ofile.close()
sys.exit("Exiting without update")
else :
sys.stdout = ofile
print json.dumps(DailyInTake)
print ","
sys.stdout = saved
ofile.close()
else :
DailyInTake = dict()
DailyInTake["%d" % options.ndays] = {}
din = DailyInTake["%s" % options.ndays]
din['Morning'] = options.morning
din['Lunch'] = options.lunch
din['Afternoon'] = options.afternoon
din['Dinner'] = options.dinner
#print DailyInTake
saved = sys.stdout
ofile = file('DailyInTakeFile.json', 'a') # append mode
sys.stdout = ofile
print json.dumps(DailyInTake)
print ","
sys.stdout = saved
ofile.close()
from datetime import date, timedelta
from subprocess import call
call("cp DailyInTakeFile.json DailyInTakeFile.json.%s" % str(date.today()), shell=True)
The output JSON file from this code looks like the following, for example:
{"1": {"Lunch": "l3", "Dinner": "d3", "Afternoon": "a3", "Morning": "m3"}}
{"2": {"Lunch": "l3", "Dinner": "d3", "Afternoon": "a3", "Morning": "m3"}}
As you can see, it just adds a separate dictionary each time rather than merging into the first one created. I just can't figure it out anymore. Any help will be appreciated.
UPDATE WITH CODE THAT MAINLY CHANGED
saved = sys.stdout
for idx in CurDct.keys() :
if int(idx) == options.ndays :
print idx, options.ndays
print "The Intake for day # %d exists" %options.ndays
print "Are you sure you want to overwrite: Type [yes/no]"
lett=sys.stdin.read()
if "yes" in lett :
ofile = file('DailyInTakeFile.json', 'w') # write mode
sys.stdout = ofile
CurDct.update(DailyInTake)
print json.dumps(CurDct)
sys.stdout = saved
ofile.close()
sys.exit("Exiting after updating day # %d" % options.ndays)
else :
sys.exit("Exiting without update")
else :
ofile = file('DailyInTakeFile.json', 'w') # write mode
sys.stdout = ofile
CurDct.update(DailyInTake)
print json.dumps(CurDct)
sys.stdout = saved
ofile.close()
According to the code, you create a new dictionary every time (DailyInTake = dict()) and never merge it into the one already in the file, so the output file just gets another dictionary appended to it.
My suggestion would be:
Add the new dictionary entry to CurDct as CurDct[index] = DailyInTake[index], then dump the whole dictionary back to the file, opening the file for writing rather than appending.
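A minimal sketch of that idea, using the variable names from the question (CurDct loaded from the file, DailyInTake holding the new day):
CurDct.update(DailyInTake)                    # merge the new day into what was loaded
ofile = open('DailyInTakeFile.json', 'w')     # rewrite the whole file instead of appending
json.dump(CurDct, ofile)
ofile.close()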
Using JSON serialisation as a mutable data storage backend seems like a rather odd solution. Without looking at your code in detail, I suggest using one of the solutions that are meant to be used this way. The most suitable one for this case seems to be the shelve module.
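A minimal sketch with shelve, mirroring the day-number key and meal dict used in the question (the file name daily_intake is arbitrary):
import shelve

db = shelve.open('daily_intake')  # creates the backing file(s) on first use
db['1'] = {'Morning': 'm3', 'Lunch': 'l3', 'Afternoon': 'a3', 'Dinner': 'd3'}
print(db.get('1'))
db.close()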
I'm trying to use Python to processes some PDF forms that were filled out and signed using Adobe Acrobat Reader.
I've tried:
The pdfminer demo: it didn't dump any of the filled out data.
pyPdf: it maxed a core for 2 minutes when I tried to load the file with PdfFileReader(f) and I just gave up and killed it.
Jython and PDFBox: got that working great but the startup time is excessive, I'll just write an external utility in straight Java if that's my only option.
I can keep hunting for libraries and trying them but I'm hoping someone already has an efficient solution for this.
Update: Based on Steven's answer I looked into pdfminer and it did the trick nicely.
from argparse import ArgumentParser
import pickle
import pprint
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdftypes import resolve1, PDFObjRef
def load_form(filename):
"""Load pdf form contents into a nested list of name/value tuples"""
with open(filename, 'rb') as file:
parser = PDFParser(file)
doc = PDFDocument(parser)
return [load_fields(resolve1(f)) for f in
resolve1(doc.catalog['AcroForm'])['Fields']]
def load_fields(field):
"""Recursively load form fields"""
form = field.get('Kids', None)
if form:
return [load_fields(resolve1(f)) for f in form]
else:
# Some field types, like signatures, need extra resolving
return (field.get('T').decode('utf-16'), resolve1(field.get('V')))
def parse_cli():
"""Load command line arguments"""
parser = ArgumentParser(description='Dump the form contents of a PDF.')
parser.add_argument('file', metavar='pdf_form',
help='PDF Form to dump the contents of')
parser.add_argument('-o', '--out', help='Write output to file',
default=None, metavar='FILE')
parser.add_argument('-p', '--pickle', action='store_true', default=False,
help='Format output for python consumption')
return parser.parse_args()
def main():
args = parse_cli()
form = load_form(args.file)
if args.out:
with open(args.out, 'w') as outfile:
if args.pickle:
pickle.dump(form, outfile)
else:
pp = pprint.PrettyPrinter(indent=2)
file.write(pp.pformat(form))
else:
if args.pickle:
print(pickle.dumps(form))
else:
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(form)
if __name__ == '__main__':
main()
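Example invocation, pretty-printing the form contents to stdout (the script name pdf_form_dump.py and the PDF name are placeholders; -o writes to a file and -p emits pickle output instead):
python pdf_form_dump.py filled_form.pdf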
You should be able to do it with pdfminer, but it will require some delving into the internals of pdfminer and some knowledge about the pdf format (wrt forms of course, but also about pdf's internal structures like "dictionaries" and "indirect objects").
This example might help you on your way (I think it will work only on simple cases, with no nested fields etc...)
import sys
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdftypes import resolve1
filename = sys.argv[1]
fp = open(filename, 'rb')
parser = PDFParser(fp)
doc = PDFDocument(parser)
fields = resolve1(doc.catalog['AcroForm'])['Fields']
for i in fields:
field = resolve1(i)
name, value = field.get('T'), field.get('V')
print '{0}: {1}'.format(name, value)
EDIT: forgot to mention: if you need to provide a password, pass it to doc.initialize()
Python 3.6+:
pip install PyPDF2
# -*- coding: utf-8 -*-
from collections import OrderedDict
from PyPDF2 import PdfFileWriter, PdfFileReader
def _getFields(obj, tree=None, retval=None, fileobj=None):
"""
Extracts field data if this PDF contains interactive form fields.
The *tree* and *retval* parameters are for recursive use.
:param fileobj: A file object (usually a text file) to write
a report to on all interactive form fields found.
:return: A dictionary where each key is a field name, and each
value is a :class:`Field<PyPDF2.generic.Field>` object. By
default, the mapping name is used for keys.
:rtype: dict, or ``None`` if form data could not be located.
"""
fieldAttributes = {'/FT': 'Field Type', '/Parent': 'Parent', '/T': 'Field Name', '/TU': 'Alternate Field Name',
'/TM': 'Mapping Name', '/Ff': 'Field Flags', '/V': 'Value', '/DV': 'Default Value'}
if retval is None:
retval = OrderedDict()
catalog = obj.trailer["/Root"]
# get the AcroForm tree
if "/AcroForm" in catalog:
tree = catalog["/AcroForm"]
else:
return None
if tree is None:
return retval
obj._checkKids(tree, retval, fileobj)
for attr in fieldAttributes:
if attr in tree:
# Tree is a field
obj._buildField(tree, retval, fileobj, fieldAttributes)
break
if "/Fields" in tree:
fields = tree["/Fields"]
for f in fields:
field = f.getObject()
obj._buildField(field, retval, fileobj, fieldAttributes)
return retval
def get_form_fields(infile):
infile = PdfFileReader(open(infile, 'rb'))
fields = _getFields(infile)
return OrderedDict((k, v.get('/V', '')) for k, v in fields.items())
if __name__ == '__main__':
from pprint import pprint
pdf_file_name = 'FormExample.pdf'
pprint(get_form_fields(pdf_file_name))
The Python PyPDF2 package (successor to pyPdf) is very convenient:
import PyPDF2
f = PyPDF2.PdfReader('form.pdf')
ff = f.get_fields()
Then ff is a dict that contains all the relevant form information.
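For example, to list each field name with its filled-in value ('/V' holds the value; get_fields() returns None when the PDF has no AcroForm, hence the or {}):
for name, field in (ff or {}).items():
    print(name, field.get('/V'))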
Quick and dirty 2-minute job; just use pdfminer to convert the PDF to XML and then grab all of the fields.
from xml.etree import ElementTree
from pprint import pprint
import os
def main():
print "Calling PDFDUMP.py"
os.system("dumppdf.py -a FILE.pdf > out.xml")
# Preprocess the file to eliminate bad XML.
print "Screening the file"
o = open("output.xml","w") #open for append
for line in open("out.xml"):
line = line.replace("&#", "Invalid_XML") #some bad data in xml for formatting info.
o.write(line)
o.close()
print "Opening XML output"
tree = ElementTree.parse('output.xml')
lastnode = ""
lastnode2 = ""
list = {}
entry = {}
for node in tree.iter(): # Run through the tree..
# Check if New node
if node.tag == "key" and node.text == "T":
lastnode = node.tag + node.text
elif lastnode == "keyT":
for child in node.iter():
entry["ID"] = child.text
lastnode = ""
if node.tag == "key" and node.text == "V":
lastnode2 = node.tag + node.text
elif lastnode2 == "keyV":
for child in node.iter():
if child.tag == "string":
if entry.has_key("ID"):
entry["Value"] = child.text
list[entry["ID"]] = entry["Value"]
entry = {}
lastnode2 = ""
pprint(list)
if __name__ == '__main__':
main()
It isn't pretty, just a simple proof of concept. I need to implement it for a system I'm working on so I will be cleaning it up, but I thought I would post it in case anyone finds it useful.
Update for the latest version of pdfminer (change the imports and the parser/doc setup in the first function):
from argparse import ArgumentParser
import pickle
import pprint
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdftypes import resolve1
from pdfminer.pdftypes import PDFObjRef
def load_form(filename):
"""Load pdf form contents into a nested list of name/value tuples"""
with open(filename, 'rb') as file:
parser = PDFParser(file)
doc = PDFDocument(parser)
parser.set_document(doc)
#doc.set_parser(parser)
doc.initialize()
return [load_fields(resolve1(f)) for f in
resolve1(doc.catalog['AcroForm'])['Fields']]
def load_fields(field):
"""Recursively load form fields"""
form = field.get('Kids', None)
if form:
return [load_fields(resolve1(f)) for f in form]
else:
# Some field types, like signatures, need extra resolving
return (field.get('T').decode('utf-8'), resolve1(field.get('V')))
def parse_cli():
"""Load command line arguments"""
parser = ArgumentParser(description='Dump the form contents of a PDF.')
parser.add_argument('file', metavar='pdf_form',
help='PDF Form to dump the contents of')
parser.add_argument('-o', '--out', help='Write output to file',
default=None, metavar='FILE')
parser.add_argument('-p', '--pickle', action='store_true', default=False,
help='Format output for python consumption')
return parser.parse_args()
def main():
args = parse_cli()
form = load_form(args.file)
if args.out:
with open(args.out, 'w') as outfile:
if args.pickle:
pickle.dump(form, outfile)
else:
pp = pprint.PrettyPrinter(indent=2)
file.write(pp.pformat(form))
else:
if args.pickle:
print(pickle.dumps(form))
else:
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(form)
if __name__ == '__main__':
main()
I created a library to do this:
pip install fillpdf
from fillpdf import fillpdfs
fillpdfs.get_form_fields("ex.pdf")
Credit to dvska's answer, which forms the basis of the library code.
There is a typo on these lines:
file.write(pp.pformat(form))
Should be:
outfile.write(pp.pformat(form))