Python: mocking open and checking for close

I am trying to mock open and want to check that close gets called at least once.
class MyObject():
    def __init__(self, path):
        fp = open(path)
        self.file_list = []
        for line in fp:
            self.file_list.append(line.strip())
        fp.close()
def testsimpleFile():
    fake_file = io.StringIO("data.csv\ndata2.csv")
    with patch("builtins.open", return_value=fake_file, create=True) as mock_file:
        f = MyObject("/path/to/open/test.f")
        mock_file.assert_called_once_with("/path/to/open/test.f")
        golden_list = ["data.csv", "data2.csv"]
        assert f.file_list == golden_list
This is my working test code so far. Now I additionally want to check whether the close method was called. I tried adding
mock_file.close.assert_called_once()
and
mock_file.fake_file.close.assert_called_once()
but neither catches the method call.

The short of it is: you can't track the call with assert_called_once if the return value of open isn't a mock object. So, instead of making the return value a StringIO, we can make it a MagicMock that acts like a file handle.
from unittest.mock import patch, MagicMock

class MyObject():
    def __init__(self, path):
        fp = open(path)
        self.file_list = []
        for line in fp:
            self.file_list.append(line.strip())
        fp.close()

def testsimpleFile():
    fake_file = MagicMock()
    fake_file.__iter__.return_value = ["data.csv", "data2.csv"]
    with patch("builtins.open", return_value=fake_file, create=True) as mock_file:
        f = MyObject("/path/to/open/test.f")
        mock_file.assert_called_once_with("/path/to/open/test.f")
        golden_list = ["data.csv", "data2.csv"]
        assert f.file_list == golden_list
        fake_file.close.assert_called_once()
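For reference, the standard library also provides unittest.mock.mock_open, which builds a file-handle mock for you. Below is a minimal sketch of the same test using it, assuming Python 3.8+ (where the handle returned by mock_open supports iteration) and the same MyObject class as above:

from unittest.mock import patch, mock_open

def testsimpleFileWithMockOpen():
    m = mock_open(read_data="data.csv\ndata2.csv")
    with patch("builtins.open", m):
        f = MyObject("/path/to/open/test.f")
    m.assert_called_once_with("/path/to/open/test.f")
    assert f.file_list == ["data.csv", "data2.csv"]
    # The handle returned by mock_open() is a MagicMock, so close() is tracked.
    m.return_value.close.assert_called_once()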

Related

print key into txt file

So I am working on a script to generate serial numbers for a product. I want to make a txt file where the script prints the generated key. Somehow it can't print in there, and I don't know what I need to change about it.
key = Key('aaaa-bbbb-cccc-dddd-1111')
fh = open('key.txt')
fh.write(Key)
Ok, based on your response, I've mocked up the Key class as follows. Without more information, it's not possible to give you a definitive answer, but hopefully this helps!
class Key:
    def __init__(self, serial):
        self.serial = serial

    def process_serial(self):
        # Your processing here
        ...
        return processed_serial  # This should be a string
Then to write to file, you can do:
key = Key('aaaa-bbbb-cccc-dddd-1111')
with open('key.txt', 'w') as f:
    f.write(key.process_serial())
Alternatively, you can add a __str__ method to your class, which will specify what happens when you call the Python builtin str on your object.
class Key:
    def __init__(self, serial):
        self.serial = serial

    def __str__(self):
        out = ...  # construct what you want to write to file
        return out
Giving:
key = Key('aaaa-bbbb-cccc-dddd-1111')
with open('key.txt', 'w') as f:
    f.write(str(key))
You might also consider adding this as a method to your Key class:
class Key:
    def __init__(self, serial):
        self.serial = serial

    def process_serial(self):
        # Your processing here
        ...
        return processed_serial  # This should be a string

    def write(self, file_name):
        with open(file_name, 'w') as f:
            f.write(self.process_serial())
Try:
key = "Key('aaaa-bbbb-cccc-dddd-1111')"
fh = open('key.txt', "w")
fh.write(key)
To create a text file that doesn't already exist, you need to open it with "w".
Try doing:
key = Key('aaaa-bbbb-cccc-dddd-1111')
with open('key.txt', 'w') as fh:
    fh.write(str(key))
Hope that helps!
Note: the write must happen inside the with block; once the block exits, the file is considered closed.
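To illustrate that note, here is a minimal sketch, assuming the Key class with __str__ from the earlier answer:

key = Key('aaaa-bbbb-cccc-dddd-1111')
with open('key.txt', 'w') as f:
    f.write(str(key))  # inside the block: the file is still open
# After the block exits the file is closed; calling f.write() here
# would raise ValueError: I/O operation on closed file.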

How to write two decorators for file operations

I have a csv file with the contents below:
101,item_1
101,item_1
If it is a csv file, my code below will execute:
import csv

fName = input()

def read_csv(fName):
    try:
        with open(fName, 'r') as f:
            reader = csv.reader(f)
            for row in reader:
                print(row)
    except IOError:
        print("Could not read file:", fName)

read_csv(fName)
How do I write the exception handling in a decorator function and apply it on top of that?
First decorator:
if fName does not end with .txt or .csv, then it should output "not accept".
Second decorator:
if fName is a .txt text file, then the operations below should be performed:
def read_txt(fName):
    f = open(fName, "r")
    print(f.readline())
If it is a csv, the first function should execute; if it is a txt, the second one. How do I achieve this using decorators? I could use if conditions to handle the situation, but that is not what I am after.
My whole code without decorators is below:
fName = input()

def read_csv(fName):
    if fName.endswith('.csv'):
        # print('hi')
        try:
            with open(fName, 'r') as f:
                reader = csv.reader(f)
                for row in reader:
                    print(row)
        except IOError:
            print("Could not read file:", fName)
    # SECOND DECORATOR
    if fName.endswith('.txt'):
        f = open(fName, "r")
        print(f.readline())
    # FIRST DECORATOR
    if not fName.endswith('.csv') and not fName.endswith('.txt'):
        print('not accept')

read_csv(fName)
You can do it like this with decorators:
import functools

def check_arguments(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        fname = kwargs['fname']
        if not fname.endswith('.csv') and not fname.endswith('.txt'):
            print('not accept')
        return func(*args, **kwargs)
    return wrapper

def set_file_processor(func):
    def read_csv(fname):
        print('read_csv', fname)

    def read_txt(fname):
        print('read_txt', fname)

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        fname = kwargs['fname']
        if fname.endswith('.csv'):
            read_csv(fname)
        elif fname.endswith('.txt'):
            read_txt(fname)
        return func(*args, **kwargs)
    return wrapper

@check_arguments
@set_file_processor
def process(fname):
    pass

process(fname='input.csv')
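Note that both wrappers look the filename up as kwargs['fname'], so process has to be called with a keyword argument as in the last line; a positional call such as process('input.csv') would raise a KeyError inside the wrappers.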
Your problem doesn't really call for a decorator but for the factory pattern, i.e. processing differently based on the input file type.
The code below is a very simple, basic factory-pattern solution to your problem; modify it as needed:
import os
from abc import ABC, abstractmethod

class FileProcessor(ABC):
    @abstractmethod
    def process(self, file_path):
        pass

class TextFileProcessor(FileProcessor):
    def process(self, file_path):
        print("Text file processing goes here")

class CsvFileProcessor(FileProcessor):
    def process(self, file_path):
        print("CSV file processing goes here")

class DefaultFileProcessor(FileProcessor):
    def process(self, file_path):
        raise ValueError("File %s is not valid" % file_path)

class FileFactory:
    processors = {
        'txt': TextFileProcessor,
        'csv': CsvFileProcessor,
        'default': DefaultFileProcessor
    }

    def __init__(self, file_path):
        if not os.path.exists(file_path):
            raise IOError("File not found")
        self.file_path = file_path

    def process(self):
        dot_splits = self.file_path.split(".")
        ext = dot_splits[-1] if len(dot_splits) > 1 else "default"
        ext = ext if ext in self.processors else "default"
        processor_class = self.processors.get(ext)
        return processor_class().process(self.file_path)

FileFactory(file_path).process()
At a later stage, if you would like to add a JSON processor, that can be done easily by adding
processors = {
    'txt': TextFileProcessor,
    'csv': CsvFileProcessor,
    'json': JsonFileProcessor,
    'default': DefaultFileProcessor
}
and creating a new JSON processor class:
class JsonFileProcessor(FileProcessor):
    def process(self, file_path):
        print("JSON file processing goes here")
Based on your code and this very useful guide, here is a possible solution:
def read_file_decorator(fName):
    def read_csv():
        print('read_csv')
        with open(fName, 'r') as f:
            reader = csv.reader(f)
            for row in reader:
                print(row)

    def read_txt():
        print('read_txt')
        f = open(fName, 'r')
        for row in f:
            print(row)

    if fName.endswith('.csv'):
        return read_csv
    elif fName.endswith('.txt'):
        return read_txt
    else:
        return None

reader_function = read_file_decorator(fileName)
if reader_function is not None:
    reader_function()
else:
    print('not accept')
I use a stateful decorator that remembers the file name inside the reader function before actually executing it (so it doesn't have to be passed twice), and I use the fixed value None for invalid file types.
Based on the requirements, using decorators here would be overkill. But if it's mandatory to implement this with a decorator, this is how we can do it:
We can create a dummy function called read_file and a decorator function called reader.
The user always calls read_file with the filename as the argument, and the decorator function reader checks the passed filename's extension and calls the required function, read_csv or read_text.
def reader(fun):
    def wrapper(*args):
        fname = args[0]
        if fname.endswith('.csv'):
            read_csv(fname)
        elif fname.endswith('.txt'):
            read_text(fname)
        else:
            print('not accepted')
    return wrapper

def read_csv(fname):
    print('In read_csv()')

def read_text(fname):
    print('In read_text()')

@reader
def read_file(fname):
    pass

read_file('a.csv')
read_file('a.txt')
read_file('filename.py')
Output
In read_csv()
In read_text()
not accepted

Unit testing with external data file in Python

I am new to python and am trying to run unit tests injected with data from an external file:
class TestIsEquivalent(unittest.TestCase):
    filename = os.getcwd() + '\\data\\test-data.txt'
    with open(filename) as f:
        testdata = f.readlines()

    for value in testdata:
        print(value)

        def test_0(self):
            result = isEquivalent(value, value, 0)
            self.assertEqual(result, True)
I am getting the following error on the line where isEquivalent() is called within the test function:
NameError: name 'value' is not defined
This would seem to suggest a scoping issue but I cannot figure out how to pass the value in the for loop into the test method. Any suggestions would be much appreciated.
Your complete test case should be wrapped inside a function. I'm not completely sure what you're trying to do with a function definition inside a for loop, but moving def test_0(self) to the top of your class does the trick.
class TestIsEquivalent(unittest.TestCase):
    def test_0(self):
        filename = os.getcwd() + '\\data\\test-data.txt'
        with open(filename) as f:
            testdata = f.readlines()
        for value in testdata:
            print(value)
            result = isEquivalent(value, value, 0)
            self.assertEqual(result, True)
But if you want to create multiple test cases that all use the values inside testdata, and you don't want to read the file multiple times, you can use the setUpClass method, which is executed once at the start of all the unit tests inside a class. The result would look like this:
class TestIsEquivalent(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        filename = os.getcwd() + '\\data\\test-data.txt'
        with open(filename) as f:
            cls.testdata = f.readlines()

    def test_0(self):
        for value in self.testdata:
            print(value)
            result = isEquivalent(value, value, 0)
            self.assertEqual(result, True)
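If you want each line of test data reported as its own case instead of stopping at the first failure, a possible variant (assuming the setUpClass version above) is unittest's subTest:

# inside the same TestIsEquivalent class as above
def test_0(self):
    for value in self.testdata:
        with self.subTest(value=value):
            self.assertEqual(isEquivalent(value, value, 0), True)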

mocking a variable using mox

I want to test this method; however, I would need to mock the variable dirContent:
def imageFilePaths(paths):
    imagesWithPath = []
    for _path in paths:
        try:
            dirContent = os.listdir(_path)
        except OSError:
            raise OSError("Provided path '%s' doesn't exists." % _path)
        for each in dirContent:
            selFile = os.path.join(_path, each)
            if os.path.isfile(selFile) and isExtensionSupported(selFile):
                imagesWithPath.append(selFile)
    return list(set(imagesWithPath))
How do I just mock a variable using mox?
This is how I have tried to mock os.listdir, however:
def setUp(self):
    self._filePaths = ["/test/file/path"]
    self.mox = mox.Mox()

def test_imageFilePaths(self):
    filePaths = self._filePaths[0]
    self.mox.StubOutWithMock(os, 'listdir')
    dirContent = os.listdir(filePaths).AndReturn(['file1.jpg', 'file2.PNG', 'file3.png'])
    self.mox.ReplayAll()
    utils.imageFilePaths(filePaths)
    self.mox.VerifyAll()
I also tried this way:
def test_imageFilePaths(self):
    filePaths = self._filePaths
    os = self.mox.CreateMock('os')
    os.listdir = self.mox.CreateMock(os)
    dirContent = os.listdir(filePaths).AndReturn(['file1.jpg', 'file2.PNG', 'file3.png'])
    self.mox.ReplayAll()
    lst = utils.imageFilePaths(filePaths)
    # self.assertEquals('/test/file/path/file1.jpg', lst[0])
    self.mox.VerifyAll()
but the call to the method being tested doesn't recognize the mocked dirContent.
Typically you would not mock a variable, but instead mock the function call used to set that variable's value. In your example, for instance, you'd mock out os.listdir and have it return a mock value.
# Your test file
import os

class YourTest(...):
    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    # Your test
    def testFoo(self):
        self.mox.StubOutWithMock(os, 'listdir')
        # the calls you expect to listdir, and what they should return
        os.listdir("some path").AndReturn([...])
        self.mox.ReplayAll()
        # ... the rest of your test
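For comparison, roughly the same stubbing can be done with the standard library's unittest.mock instead of mox. This is only a sketch and assumes utils does import os and that isExtensionSupported accepts .jpg/.PNG names:

from unittest import mock

def test_imageFilePaths_with_unittest_mock():
    with mock.patch("os.listdir", return_value=["file1.jpg", "file2.PNG"]), \
         mock.patch("os.path.isfile", return_value=True):
        result = utils.imageFilePaths(["/test/file/path"])
    assert "/test/file/path/file1.jpg" in result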

Wrapper to write to multiple streams

In python, is there an easy way to set up a file-like object for writing that is actually backed by multiple output streams? For instance, I want something like this:
file1 = open("file1.txt", "w")
file2 = open("file2.txt", "w")
ostream = OStreamWrapper(file1, file2, sys.stdout)
#Write to both files and stdout at once:
ostream.write("ECHO!")
So what I'm looking for is OStreamWrapper. I know it'd be pretty easy to write my own, but if there's an existing one, I'd rather use that and not have to worry about finding and covering edge cases.
class OStreamWrapper(object):
    def __init__(self, *streams):
        self.streams = list(streams)

    def write(self, string):
        for stream in self.streams:
            stream.write(string)

    def writelines(self, lines):
        # If you want to use stream.writelines(), you have
        # to convert lines into a list/tuple as it could be
        # a generator.
        for line in lines:
            for stream in self.streams:
                stream.write(line)

    def flush(self):
        for stream in self.streams:
            stream.flush()
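As a usage note, anything that accepts a writable file-like object can take this wrapper; for example, print's file argument only needs a write() method:

import sys

file1 = open("file1.txt", "w")
ostream = OStreamWrapper(file1, sys.stdout)
print("ECHO!", file=ostream)  # goes to file1.txt and stdout
ostream.flush()
file1.close()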
A way to wrap all public file methods:
import io
import sys

def _call_for_all_streams(func_name):
    def wrapper(self, *args, **kwargs):
        result = []
        for stream in self._streams:
            func = getattr(stream, func_name)
            result.append(func(*args, **kwargs))
        return result
    return wrapper

class OStreamWrapper(object):
    def __init__(self, *streams):
        self._streams = streams

# Python 3 has no `file` builtin, so take the public methods from io.TextIOBase.
for method in filter(lambda x: not x.startswith('_') and callable(getattr(io.TextIOBase, x)), dir(io.TextIOBase)):
    setattr(OStreamWrapper, method, _call_for_all_streams(method))

if __name__ == '__main__':
    file1 = open("file1.txt", "w")
    file2 = open("file2.txt", "w")
    ostream = OStreamWrapper(file1, file2, sys.stdout)
    ostream.write("ECHO!")
    ostream.close()
But it's kinda dirty.
Logbook is another option, although it does more than this: its handlers are more powerful and you can combine them however you like.
