Disable a Python unit test with a message

My version of Python does not support
@unittest.skip("demonstrating skipping")
From "Disable individual Python unit tests temporarily", I understand how to use a decorator to achieve this, i.e.,
def disabled(f):
    def _decorator():
        print f.__name__ + ' has been disabled'
    return _decorator

@disabled
def testFoo():
    '''Foo test case'''
    print 'this is foo test case'

testFoo()
However, the decorator does not support providing a message explaining why the test is skipped. How can I achieve this? I basically want something like
def disabled(f, msg):
    def _decorator():
        print f.__name__ + ' has been disabled' + msg
    return _decorator

@disabled("I want to skip it")
def testFoo():
    '''Foo test case'''
    print 'this is foo test case'

testFoo()

You can modify the decorator like so:
def disabled(msg):
    def _decorator(f):
        def _wrapper():
            print f.__name__ + ' has been disabled ' + msg
        return _wrapper
    return _decorator

@disabled("I want to skip it")
def testFoo():
    '''Foo test case'''
    print 'this is foo test case'

testFoo()
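As an aside (not part of the question or the answer above): wrapping with functools.wraps keeps the original test's __name__ and docstring on the wrapper, which can matter if a test runner reports them. A minimal sketch of the same decorator with that addition:
import functools

def disabled(msg):
    def _decorator(f):
        @functools.wraps(f)          # keep f's __name__ and docstring on the wrapper
        def _wrapper():
            print(f.__name__ + ' has been disabled: ' + msg)
        return _wrapper
    return _decorator

@disabled("I want to skip it")
def testFoo():
    '''Foo test case'''
    print('this is foo test case')

testFoo()  # prints: testFoo has been disabled: I want to skip it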

Related

How do I add an additional print statement for every print statement called in my code? (preferably using code decorators)

I want to check if a print statement is being called and add an additional print statement after it, without having to add an empty print statement manually after every instance.
My output is currently like this:
Hello
Hello
Hello
Hello
(outputs this 10 times)
while I would like it to look like this:
Hello

Hello

Hello

Hello
(output 10 times as well, in this pattern)
I preferably want to use decorators as that is the easiest and best-looking solution, but another solution would also be okay!
I tried asking ChatGPT, but it doesn't change anything about the output.
Currently, this is my code:
def readability(func):
def wrapper(*args,**kwargs):
result = func(*args, **kwargs)
if callable(func) and func.__name__ == 'print':
print(' ')
return result
return wrapper
#readability
def printing():
j=10
while j>0:
print('hello')
j-=1
printing()
You could use contextlib.ContextDecorator, which lets you use either a decorator or a context manager, giving you finer control over where your modified print function applies.
To replace print, you just have to assign print = your_own_function. You can still access the original print as __builtins__.print.
Here is an example with two sample personalized print functions, which you can apply wherever you want: to a whole function using a decorator, or to a piece of code using a context manager:
from contextlib import ContextDecorator

def double_print(data):
    __builtins__.print(data)
    __builtins__.print()

def underlined_print(data):
    __builtins__.print(data)
    __builtins__.print('-' * len(str(data)))

class my_print_context(ContextDecorator):
    def __init__(self, print_function):
        self.print = print_function

    def __enter__(self):
        global print
        print = self.print
        return self

    def __exit__(self, *exc):
        global print
        print = __builtins__.print
        return False

def nums():
    for i in range(3):
        print(i)
We can use this like this:
@my_print_context(double_print)
def nums2():
    for i in range(3):
        print(i)

print('\nNormal')
nums()

print('\nDecorated')
nums2()

print('\nWith context manager')
with my_print_context(underlined_print):
    print('Hello')
    print('We are underlined')
    nums()

print('\nAfter the context')
nums()
which outputs:
Normal
0
1
2
Decorated
0

1

2

With context manager
Hello
-----
We are underlined
-----------------
0
-
1
-
2
-
After the context
0
1
2
Original answer:
original_print = print

def print(*args):
    original_print(*args)
    original_print(' ')

print('Hello')
Improved answer using __builtins__:
def print(*args):
    __builtins__.print(*args)
    __builtins__.print(' ')
Both versions produce the following output:
Hello
<blank line>
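One caveat worth adding (an observation of mine, not from the answers above): __builtins__ is a CPython detail that is the builtins module in the main script but a plain dict inside imported modules, so import builtins is the more portable way to reach the original print on Python 3. A minimal sketch:
import builtins

def print(*args, **kwargs):
    builtins.print(*args, **kwargs)   # call the real built-in print
    builtins.print()                  # then emit the extra blank line

print('Hello')   # prints "Hello" followed by a blank line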

Detecting context manager nesting

I've been wondering recently if there's a way to detect whether a context manager is nested.
I've created Timer and TimerGroup classes:
from time import clock
from datetime import timedelta

class Timer:
    def __init__(self, name="Timer"):
        self.name = name
        self.start_time = clock()

    @staticmethod
    def seconds_to_str(t):
        return str(timedelta(seconds=t))

    def end(self):
        return clock() - self.start_time

    def print(self, t):
        print(("{0:<" + str(line_width - 18) + "} >> {1}").format(self.name, self.seconds_to_str(t)))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, value, traceback):
        self.print(self.end())

class TimerGroup(Timer):
    def __enter__(self):
        print(('= ' + self.name + ' ').ljust(line_width, '='))
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        total_time = self.seconds_to_str(self.end())
        print(" Total: {0}".format(total_time).rjust(line_width, '='))
        print()
This code prints timings in a readable format:
with TimerGroup("Collecting child documents for %s context" % context_name):
with Timer("Collecting context features"):
# some code...
with Timer("Collecting child documents"):
# some code...
= Collecting child documents for Global context ============
Collecting context features >> 0:00:00.001063
Collecting child documents >> 0:00:10.611130
====================================== Total: 0:00:10.612292
However, when I nest TimerGroups, it messes things up:
with TimerGroup("Choosing the best classifier for %s context" % context_name):
with Timer("Splitting datasets"):
# some code...
for cname, cparams in classifiers.items():
with TimerGroup("%s classifier" % cname):
with Timer("Training"):
# some code...
with Timer("Calculating accuracy on testing set"):
# some code
= Choosing the best classifier for Global context ==========
Splitting datasets >> 0:00:00.002054
= Naive Bayes classifier ===================================
Training >> 0:00:34.184903
Calculating accuracy on testing set >> 0:05:08.481904
====================================== Total: 0:05:42.666949
====================================== Total: 0:05:42.669078
All I need to do is indent the nested Timers and TimerGroups somehow. Should I pass any parameters to their constructors? Or can I detect that from inside the class?
There are no special facilities to detect nested context managers, no. You'd have to handle this on your own. You could do this within your own context manager:
import threading

class TimerGroup(Timer):
    _active_group = threading.local()

    def __enter__(self):
        if getattr(TimerGroup._active_group, 'current', False):
            raise RuntimeError("Can't nest TimerGroup context managers")
        TimerGroup._active_group.current = self
        print(('= ' + self.name + ' ').ljust(line_width, '='))
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        TimerGroup._active_group.current = None
        total_time = self.seconds_to_str(self.end())
        print(" Total: {0}".format(total_time).rjust(line_width, '='))
        print()
You can then use the TimerGroup._active_group attribute elsewhere to grab the currently active group. I used a thread-local object to ensure that this can be used across multiple threads of execution.
Alternatively, you could make that a stack counter and just increment and decrement in nested __enter__ calls, or a stack list and push self onto that stack, popping it again when you __exit__:
import threading

class TimerGroup(Timer):
    _active_group = threading.local()

    def __enter__(self):
        if not hasattr(TimerGroup._active_group, 'current'):
            TimerGroup._active_group.current = []
        stack = TimerGroup._active_group.current
        if stack:
            # nested context manager:
            # do something with stack[-1] or stack[0]
            pass
        TimerGroup._active_group.current.append(self)
        print(('= ' + self.name + ' ').ljust(line_width, '='))
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        last = TimerGroup._active_group.current.pop()
        assert last == self, "Context managers being exited out of order"
        total_time = self.seconds_to_str(self.end())
        print(" Total: {0}".format(total_time).rjust(line_width, '='))
        print()
If all you need to do is adjust an indentation level based on how many nested context managers you're executing in, then have a class attribute called indent_level and adjust it each time you enter and exit a context manager. Something like the following:
class Context:
    indent_level = 0

    def __init__(self, name):
        self.name = name

    def __enter__(self):
        print(' ' * 4 * self.indent_level + 'Entering ' + self.name)
        self.adjust_indent_level(1)
        return self

    def __exit__(self, *a, **k):
        self.adjust_indent_level(-1)
        print(' ' * 4 * self.indent_level + 'Exiting ' + self.name)

    @classmethod
    def adjust_indent_level(cls, val):
        cls.indent_level += val
And use it as:
>>> with Context('Outer') as outer_context:
...     with Context('Inner') as inner_context:
...         print(' ' * inner_context.indent_level * 4 + 'In the inner context')
...
Entering Outer
    Entering Inner
        In the inner context
    Exiting Inner
Exiting Outer
import this:
Explicit is better than implicit.
A cleaner design would explicitly allow specifying a group:
with TimerGroup('Doing big task') as big_task_tg:
    with Timer('Foo', big_task_tg):
        foo_result = foo()
    with Timer('Bar', big_task_tg):
        bar(baz(foo_result))
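The question's Timer does not accept a group argument, so below is one possible self-contained sketch of that explicit design; the class bodies, the time.sleep calls and the formatting are illustrative stand-ins, not the original code:
import time

class TimerGroup:
    def __init__(self, name):
        self.name = name
        self.timers = []            # child timers register themselves here

    def __enter__(self):
        print('= ' + self.name + ' =')
        return self

    def __exit__(self, *exc):
        total = sum(t.elapsed for t in self.timers)
        print('  Total: %.3fs' % total)
        return False

class Timer:
    def __init__(self, name, group=None):
        self.name = name
        self.group = group          # explicit parent group, if any

    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, *exc):
        self.elapsed = time.time() - self.start
        indent = '    ' if self.group else ''
        print('%s%s >> %.3fs' % (indent, self.name, self.elapsed))
        if self.group is not None:
            self.group.timers.append(self)
        return False

with TimerGroup('Doing big task') as big_task_tg:
    with Timer('Foo', big_task_tg):
        time.sleep(0.1)
    with Timer('Bar', big_task_tg):
        time.sleep(0.2)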
On the other hand, you can always use traceback.extract_stack and look for invocations of a particular function upstream. It is very useful for logging and error reporting, and can be moderately useful for ensuring that particular functions are only invoked in a certain context, but it tends to create dependencies that are very hard to track.
I would avoid it for grouping timers, though you can try. If you badly need automatic grouping, @Martijn-Pieters's approach is far superior.
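For completeness, here is a small sketch of that stack-inspection idea (my own illustration; the function names are made up):
import traceback

def called_from(func_name):
    """Return True if a frame named func_name appears in the current call stack."""
    return any(frame.name == func_name for frame in traceback.extract_stack())

def inner():
    if called_from('outer'):
        print('inner() was reached via outer()')

def outer():
    inner()

outer()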

Using Decorator for Data Driven Tests

I am trying to parametrize a function using a decorator. I finally got it to run as expected after a lot of trial and error, but I am still not satisfied: although it works, it doesn't seem to be the right way to do it.
Please help me improve this code.
Here is my code:
def WarmWelcome(fn):
    def wrapped(DataProvider):
        for name in DataProvider():
            print fn(name) + ":)"
    return wrapped

def DataProvider():
    names = ["abc", "xyz", "def"]
    for name in names:
        yield name

@WarmWelcome
def hello(name):
    return "hello " + name

hello(DataProvider)
Here is the updated code:
def WarmWelcome(DataProvider):
    def real_decorator(fn):
        def wrapped():
            for name in DataProvider():
                print fn(name) + ":)"
        return wrapped
    return real_decorator

def DataProvider():
    names = ["abc", "xyz", "def"]
    for name in names:
        yield name

@WarmWelcome(DataProvider)
def hello(name):
    return "hello " + name

hello()
It's also possible to provide the dataset directly to the WarmWelcome decorator:
def WarmWelcome(*data_sets):
    def _decorator(func):
        def _func():
            for ds in data_sets:
                func(*ds)
        return _func
    return _decorator

@WarmWelcome(
    ("abc",),
    ("xyz",),
    ("def",),
)
def hello(name):
    return "hello " + name
Original: PHPUnit-like dataProvider implementation for Python unittest
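As an aside (not part of the original answers): if using a test framework is an option, pytest's parametrize decorator covers the same data-driven pattern out of the box. A minimal sketch, assuming pytest is installed:
import pytest

def hello(name):
    return "hello " + name

@pytest.mark.parametrize("name", ["abc", "xyz", "def"])
def test_hello(name):
    # pytest generates one test per parameter value
    assert hello(name) == "hello " + name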

Why is a class defined with Python exec not working?

Here is the code. The class 'demo' defined by exec does not work when creating a demo instance in _getTestObj().
FileName: test.py
class runOneIni():
    def _getTestObj(self):
        demo(self.tcName, secSetup, doc)

def start():
    # Implicitly defining the 'demo' class via exec is not working; the error raised in
    # runOneIni._getTestObj is: NameError: name 'demo' is not defined
    a = 'global demo'
    exec(a)
    str = "class demo(tInvokeMethod.tInvokeMethod): pass"
    exec(str)

    # Explicitly defining the demo class works:
    # global demo
    # class demo(tInvokeMethod.tInvokeMethod): pass

if __name__ == '__main__':
    start()
(1) You have an unterminated string
(2) It is unnecessary to use exec to do this. class is itself an executable statement, which can appear anywhere any other statement can (except for a place where an expression statement is required).
You could do something like this:
class SomeBaseClass(object):
    def __init__(self):
        self.a = 1
        self.b = 2

def make_new_class(name):
    class TemplateClass(SomeBaseClass):
        def __init__(self):
            SomeBaseClass.__init__(self)
            self.a = 3
    return type(name, (TemplateClass,), {})

o1 = SomeBaseClass()
print o1.a, o1.b

NewClass = make_new_class('NewClass')
o2 = NewClass()
print o2.a, o2.b
Result:
1 2
3 2
The problem is not with defining a class via exec. The following works as intended:
exec 'class hi: pass'
Your problem is that "global" inside an exec statement has no effect outside it. According to the python documentation for exec:
the global is a directive to the parser. It applies only to code parsed at the same time as the global statement. In particular, a global statement contained in an exec statement does not affect the code block containing the exec statement, and code contained in an exec statement is unaffected by global statements in the code containing the exec statement.
Why are you doing that? (exec apart)
Why are you trying to do that with exec?
Also, doing it with exec will:
Not work.
Give different results in python-2.x and in python-3.x.
Example:
class demo:
    a = 'a'

print(demo.a)

def start():
    global demo
    class demo:
        b = "b"
    try:
        print(demo.a)
    except AttributeError:
        print(demo.b)

if __name__ == '__main__':
    start()
    try:
        print(demo.a)
    except AttributeError:
        print(demo.b)
That, in both python-2.x and python-3.x, will give:
a
b
b
And now let's try it with exec:
class demo:
    a = 'a'

print(demo.a)

def start():
    exec('global demo', globals(), locals())
    exec('class demo: b = "b"', globals(), locals())
    try:
        print(demo.a)
    except AttributeError:
        print(demo.b)

if __name__ == '__main__':
    start()
    try:
        print(demo.a)
    except AttributeError:
        print(demo.b)
Output python2.7:
a
b
a
Output python3.2:
a
a
a
Q: How to 'dynamically create the class'?
As kindall already told you, exec is not the way to do that.
A metaclass or a class factory does that, but are you sure you actually need that?
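For reference, a class factory needs neither exec nor a metaclass: the three-argument form of type() builds a class from a name, a tuple of bases and a namespace dict. A minimal sketch (the class and attribute names are illustrative):
class Base:
    def greet(self):
        return 'hello from ' + type(self).__name__

def make_class(name, **attrs):
    # type(name, bases, namespace) creates a new class object dynamically
    return type(name, (Base,), dict(attrs))

Demo = make_class('Demo', answer=42)
d = Demo()
print(d.greet())   # hello from Demo
print(d.answer)    # 42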
I might be a little late to the party, but I came up with something that seems to work okay. It will even coerce the type, thanks to the property setters. I'm sure this is all horribly unpythonic, but I think it's kinda fun.
def generateClass(propertyNames, propertyTypes):
    # Build the source of the class as a string, then exec it.
    string = 'class generatedClass(object):\n    def __init__(self):\n'
    for pN in propertyNames:
        string += '        self._m' + pN + ' = None\n'
    string += '\n'
    i = 0
    for pN in propertyNames:
        string += '    @property\n' \
                  '    def ' + pN + '(self):\n' \
                  '        return self._m' + pN + '\n' \
                  '    @' + pN + '.setter\n' \
                  '    def ' + pN + '(self, a' + pN + '):\n' \
                  '        if a' + pN + ':\n' \
                  '            self._m' + pN + ' = ' + propertyTypes[i] + '(a' + pN + ')\n' \
                  '\n'
        i += 1
    exec(string)
    return generatedClass()

if __name__ == '__main__':
    c = generateClass(['SomePropertyName'], ['str'])
    print c.__dict__
    setattr(c, 'SomePropertyName', 'some string')
    print c.__dict__
You need to add the global demo in the same exec string.
Here is the code with the result:
class RunOneIni:
    def _getTestObj(self):
        self.tcName = 'tn'
        print(demo(self.tcName, 'secSetup', 'doc').__class__.__name__)

def start():
    t = 'class tInvokeMethod:\n\tclass tInvokeMethod:\n\t\tpass'
    exec(t)
    d = 'global demo\nclass demo(tInvokeMethod.tInvokeMethod):\n\tdef __init__(self, *args): pass'
    exec(d)
    demo()

if __name__ == '__main__':
    start()
    RunOneIni()._getTestObj()

Method for dry runs?

At the moment, my Python code often looks like this:
...
if not dry_run:
    result = shutil.copyfile(...)
else:
    print " DRY-RUN: shutil.copyfile(...) "
...
I am now thinking about writing something like a dry runner method:
def dry_runner(cmd, dry_run, message, before="", after=""):
    if dry_run:
        print before + "DRY-RUN: " + message + after
    # return execute(cmd)
But then cmd is evaluated first and only its result is passed to the dry_runner method.
How can I write such a method in a Pythonic way?
You could use this generic wrapper function:
def execute(func, *args):
    print 'before', func
    if not dry:
        func(*args)
    print 'after', func

>>> execute(shutil.copyfile, 'src', 'dst')
This isn't perfect in its display, but the functionality works. Hopefully this is clear enough:
dry = True

def dryrun(f):
    def wrapper(*args, **kwargs):
        if dry:
            print "DRY RUN: %s(%s)" % (f.__name__,
                ','.join(list(args) + ["%s=%s" % (k, v) for (k, v) in kwargs.iteritems()]))
        else:
            f(*args, **kwargs)
    return wrapper

import shutil
copyfile = dryrun(shutil.copyfile)
copyfile('a', 'b')
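A small variant of the same idea (my own sketch, not from the answer above): using functools.wraps and returning the wrapped call's result makes the wrapper a closer drop-in replacement for the real function:
import functools

dry = True

def dryrun(f):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        if dry:
            print("DRY RUN: %s(%s)" % (f.__name__, ", ".join(map(repr, args))))
            return None
        return f(*args, **kwargs)     # real run: forward the return value
    return wrapper

@dryrun
def copyfile(src, dst):
    import shutil
    return shutil.copyfile(src, dst)

copyfile('a', 'b')   # with dry=True this only prints the call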
