I have a function that prints some values to a file:
from typing import TextIO

def print_to_file(file: TextIO, value: str) -> None:
    print(value, file=file)
Here is my current unit test, but it raises a PermissionError when running on the online server. What is the correct way to test this?
def test_print_to_file(self):
    file_name = 'temp_file.txt'
    value = '123'
    expected_content = '123\n'
    with open(file_name, 'w') as file:
        print_to_file(file, value)
    with open(file_name, 'r') as file:
        self.assertEqual(file.read(), expected_content)
Try adding + to the w and r modes:
def test_print_to_file(self):
    file_name = 'temp_file.txt'
    value = '123'
    expected_content = '123\n'
    with open(file_name, 'w+') as file:
        print_to_file(file, value)
    with open(file_name, 'r+') as file:
        self.assertEqual(file.read(), expected_content)
I got it fixed by using tempfile:
import tempfile

def test_print_to_file(self):
    value = '123'
    expected_content = '123\n'
    with tempfile.TemporaryFile('w+t') as file:
        print_to_file(file, value)
        file.seek(0)
        self.assertEqual(file.read(), expected_content)
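Another option, not part of the original fix but worth sketching since print_to_file only needs a TextIO object: an in-memory io.StringIO avoids touching the filesystem entirely.

import io

def test_print_to_file_in_memory(self):
    # Sketch of an alternative: io.StringIO satisfies TextIO, so no real file is needed.
    value = '123'
    expected_content = '123\n'
    buffer = io.StringIO()
    print_to_file(buffer, value)
    self.assertEqual(buffer.getvalue(), expected_content)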
Related
text_list = []

with open("word5Dict.txt", "r") as f:
    for line in f:
        for word in line.split("#"):
            if word != "\n":
                text_list.append(word)
    f.close()

new_file = open("new_text_file", "w")
for word in text_list:
    new_file.write(word)
    new_file.write("\n")
class ScrabbleDict:
    def __init__(self, size, filename):
        self.dictionary = {}
        with open(filename, "r") as f:
            for line in f:
                key = line.replace("\n", "")
                if len(key) == size:
                    self.dictionary[key] = key
I create a new file by reading from another one, but when I try to use this file in the class I created, Python returns: No such file or directory: 'new_text_file.txt'
new_file = open("new_text_file", "w")
does not automatically add the file extension. Use
new_file = open("new_text_file.txt", "w")
instead.
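For completeness, here is a sketch of the write step with the corrected name, using a with block so the file is closed (and flushed to disk) before ScrabbleDict opens it:

# Sketch: write the collected words to new_text_file.txt and close it automatically.
with open("new_text_file.txt", "w") as new_file:
    for word in text_list:
        new_file.write(word)
        new_file.write("\n")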
As said, I'd like to open a JSON file, turn it into a list, append new elements to it, and then dump everything back into the JSON file.
Here is my code (the commented part is what I previously tried):
class Carta:
    def __init__(self, filename):
        self.__filename = filename
        self.__lista = []
        # try:
        #     f = open(self.__filename, "r")
        # except:
        #     f = open(self.__filename, "w")
        #     f.close()
        #     f = open(self.__filename, "r")
        with open(self.__filename) as file:
            self.__lista = json.load(file)
        # read = json.load(f)
        # for c in leggi:
        #     self.__lista.append(c)
        # print(self.__lista)
        # f.close()

    def add(self, c):
        self.__lista.append(c)

    def save(self):
        f = open(self.__filename, "w")
        for c in self.__lista:
            f.write("%s\n" % str(c))
        f.close()
It won't work if you read a JSON list from the file and then write back a custom string, because the next time you read the JSON file the parse will fail. So when writing/saving, you should write JSON itself. Here's code that shows how to do it:
import json

class Carta:
    def __init__(self, filename):
        self.__filename = filename
        self.__lista = list()
        self.read_from_json_file()

    def read_from_json_file(self):
        with open(self.__filename) as file:
            self.__lista = json.load(file)

    def write_to_json_file(self):
        with open(self.__filename, 'w') as f:
            json.dump(self.__lista, f)

    def add(self, value):
        self.__lista.append(value)
The reason you should use with open(filename, mode) as f: instead of f = open(filename) is that at the end of the with block the file is closed automatically; otherwise you have to call f.close() every time you open a file.
json.load reads JSON data from a file object and converts it into a Python data type/structure.
json.dump takes a Python data type/structure, converts it into a JSON string and writes it to the file (file handle).
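A quick usage sketch (the file name cards.json is only an example; the class assumes the file already exists and contains a JSON list, e.g. []):

# Usage sketch: cards.json is a placeholder and must already contain a JSON list.
carta = Carta('cards.json')   # __init__ reads the existing list from disk
carta.add('new card')         # appends to the in-memory list
carta.write_to_json_file()    # dumps the updated list back to the file as JSON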
Using pdb to trace errors
import json
import pdb

class Carta:
    def __init__(self, filename):
        self.__filename = filename
        self.__lista = list()
        self.read_from_json_file()

    def read_from_json_file(self):
        pdb.set_trace()  # pause execution here and start the debugger
        # When paused:
        #   type n to step to the next line,
        #   type c to continue execution until the next breakpoint,
        #   type b <file_name>:<line_number> to add another breakpoint, where <file_name> and <line_number> are placeholders.
        #   Example: b /home/username/hello.py:43 adds a breakpoint at line 43 of hello.py in /home/username.
        #   type q to quit the debugger and halt execution.
        with open(self.__filename) as file:
            self.__lista = json.load(file)

    def write_to_json_file(self):
        with open(self.__filename, 'w') as f:
            json.dump(self.__lista, f)

    def add(self, value):
        # Second breakpoint
        pdb.set_trace()
        self.__lista.append(value)
Or just run your file with
python -m pdb file.py and then add breakpoints. It will pause at the first line and give you a (Pdb) console where you can add breakpoints.
import json

# read from file
with open("demofile.txt", "r") as f:
    x = f.read()

# parse
y = json.loads(x)

# edit
y["user"] = {"fname": "John", "lname": "Who"}

# save to file
with open("demofile.txt", "w") as f:
    f.write(json.dumps(y))
https://repl.it/#KrzysztofPecyna/PythonJsonExample
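A small side note (a sketch, not part of the original snippet): json.dump(y, f) writes the structure straight to the file and is equivalent to f.write(json.dumps(y)) here:

# Equivalent save step, reusing y from the snippet above.
with open("demofile.txt", "w") as f:
    json.dump(y, f)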
To read JSON from a file:
import json

with open('data.txt') as json_file:
    data = json.load(json_file)
To add new data:
data['key'] = "value"
To write JSON to a file:
with open('data.txt', 'w') as outfile:
    json.dump(data, outfile)
I am having some issues passing an argument to a Python script so that it handles a specific file type such as CSV, TXT, or XML.
I am reviewing Python and would like some feedback on why I don't see any output after running the following command: ./my_script some3455.csv
#!/usr/bin/python
import sys
import csv
import xml.etree.ElementTree as ET

FILE = str(sys.argv[1])

def run_files():
    if FILE == '*.csv':
        run_csv()
    elif FILE == '*.txt':
        run_txt()
    else:
        run_xml()

def run_csv():
    csv_file = csv.register_dialect('dialect', delimiter='|')
    with open(FILE, 'r') as file:
        reader = csv.reader(file, dialect='dialect')
        for row in reader:
            print(row)

def run_txt():
    with open(FILE, 'r') as file:
        txt_contents = file.read()
        print(txt_contents)

def run_xml():
    tree = ET.parse(FILE)
    root = tree.getroot()
    for child in root.findall('Attributes'):
        car = child.find('Car').text
        color = child.find('Color').text
        print(car, color)
I have tried passing it without FILE, but that works for just one type and the other file types don't get identified.
You need to use fnmatch and not == to compare a string with a glob pattern:
import fnmatch

def run_files():
    if fnmatch.fnmatch(FILE, '*.csv'):
        run_csv()
    elif fnmatch.fnmatch(FILE, '*.txt'):
        run_txt()
    else:
        run_xml()
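A quick check of the difference (the file name is just an illustrative value):

import fnmatch

# == compares the string literally against the pattern text, so it never matches:
print('some3455.csv' == '*.csv')                 # False
# fnmatch treats '*.csv' as a glob pattern:
print(fnmatch.fnmatch('some3455.csv', '*.csv'))  # True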
The problem is that the process never goes through the branch that loads the JSON data from the file, and I do not understand why. It always goes through creating the new file each time.
import argparse
import os
import tempfile
import json

storage = argparse.ArgumentParser()
storage.add_argument("--key", help="input key's name")
storage.add_argument("--val", help="value of key", default=None)
args = storage.parse_args()

storage_path = os.path.join(tempfile.gettempdir(), 'storage.data')

with open(storage_path, 'r') as f:
    if f.seek(2) is not 2:
        data_base = json.load(f)
        print('loaded that: ', data_base)
    else:
        f.close()
        print('each time I am creating the new one')
        with open(storage_path, 'w') as f:
            data_base = {}
            f.close()

if data_base.get(args.key, 'Not found') == 'Not found':
    if args.val is not None:
        data_base.setdefault(args.key, args.val)
        with open(storage_path, 'w') as f:
            json.dump(data_base, f)
        print('dumped this: ', data_base)
There are quite a few issues with your code, e.g. the program crashing if the file does not exist:

with open(storage_path, 'r') as f:

or opening storage_path for writing but actually not writing anything:

print('each time I am creating the new one')
with open(storage_path, 'w') as f:
    data_base = {}
    f.close()

And actually, if you happened to have f.seek(2) == 2, the json.load(f) would also crash, since at that point you have moved the file pointer to the 3rd character, so the subsequent read in json.load() would not get the whole content.
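To see that concretely, a small self-contained sketch (it writes a throwaway storage.data in the current directory purely for illustration):

import json

# Illustration of the seek issue: write a small JSON file, then seek before loading.
with open('storage.data', 'w') as f:
    json.dump({"a": 1}, f)

with open('storage.data') as f:
    f.seek(2)              # the file pointer now sits after the first two characters
    try:
        json.load(f)       # reads 'a": 1}' instead of the whole document
    except json.JSONDecodeError as e:
        print('json.load() failed:', e)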
Here's a fixed version that should work AFAICT:
import argparse
import os
import tempfile
import json

storage = argparse.ArgumentParser()
storage.add_argument("--key", help="input key's name")
storage.add_argument("--val", help="value of key", default=None)
args = storage.parse_args()

storage_path = os.path.join(tempfile.gettempdir(), 'storage.data')

data_base = None
if os.path.exists(storage_path):
    with open(storage_path, 'r') as f:
        try:
            data_base = json.load(f)
            print('loaded that: ', data_base)
        except Exception as e:
            print("got %s on json.load()" % e)

if data_base is None:
    print('each time I am creating the new one')
    data_base = {}
    with open(storage_path, 'w') as f:
        json.dump(data_base, f)

# don't prevent the user from setting "Not found" as the value, it might
# be a legitimate value.
# NB: you don't check if `args.key` is actually set... maybe you should?
sentinel = object()
if data_base.get(args.key, sentinel) is sentinel:
    if args.val is not None:
        data_base[args.key] = args.val
        with open(storage_path, 'w') as f:
            json.dump(data_base, f)
        print('dumped this: ', data_base)
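As a usage note (the script name storage.py is just a placeholder, and this assumes the storage file does not exist yet): a first run such as python storage.py --key name --val Bob should print 'each time I am creating the new one' followed by 'dumped this: ...', while a second identical run should print 'loaded that: ...' and leave the stored value untouched.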
I have to create a save function and a load function that save and load a dictionary in the following format:
123;Kalle;
123;Maria;
321;Anna;
321;Olle;
My dictionary is supposed to look like a phone book, with the key being the name and the value being the phone number:
telebook = {"jacob":"8472923777", "nisse":"092563243"}
How can I write a function that saves my phonebook in the format mentioned? It should look like this:
8472923777;jacob;
This is my current code:
def save(lista, telebook):
    import pickle
    filename = lista[1]
    f = open(filename, "w")
    pickle.dump(telebook, f)
    f.close()
    print telebook

def load(lista, telebook):
    import pickle
    try:
        filename = lista[1]
        f = open(filename, "r")
        telebook_1 = pickle.load(f)
        telebook.clear()
        telebook.update(telebook_1)
        f.close()
        print telebook
    except:
        print "This file doesn't exist"
EDIT:
My save function was easier than I thought; I managed to solve it on my own. Not sure how to get the load function to work, though.
book = raw_input("telebook> ").lower()
lista = book.split()

def save(lista, telebook):
    filename = lista[1]
    f = open(filename, "w")
    for name, num in telebook.items():
        f.write(num + ";" + name + ";" + "\n")
    f.close()
    print telebook
My load is the same as before but obviously I can't use that one anymore.
def save(telebok, filepath):
    with open(filepath, 'w') as outfile:
        for name, num in telebok.items():
            outfile.write("{};{};\n".format(num, name))
And to get it back:
import csv

def load(filepath):
    with open(filepath) as infile:
        # each row is [number, name, ''] because of the trailing ';',
        # so rebuild the dict as {name: number} to match the original telebook
        telebok = dict((name, num) for num, name, _ in csv.reader(infile, delimiter=';'))
    return telebok
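A quick round-trip sketch (the file name phonebook.txt is just an example):

# Round-trip sketch using the save/load pair above; the file name is a placeholder.
telebook = {"jacob": "8472923777", "nisse": "092563243"}
save(telebook, "phonebook.txt")   # writes lines like 8472923777;jacob;
restored = load("phonebook.txt")
print(restored)                   # {'jacob': '8472923777', 'nisse': '092563243'}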