Create a specific number of function duplicates and run them simultaneously - python

I have an async program based around a set of multiple infinitely-running functions that run simultaneously.
I want to allow users to run a specific amount of specific function duplicates.
A code example of what I have now:
async def run():
await asyncio.gather(
func_1(arg1, arg2),
func_2(arg2),
func_3(arg1, arg3),
)
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
loop.close()
Let's say a user wants to run 2 instances of func_2. I want the core code to look like this:
async def run():
await asyncio.gather(
func_1(arg1, arg2),
func_2(arg2),
func_2(arg2),
func_3(arg1, arg3),
)
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
loop.close()
Any way to elegantly achieve this?

I would handle it this way (if we are talking about elegance).
# A tuple of the available functions
functions = (func_1, func_2, func_3)
async def run(**kwargs):
def _tasks():
for function in functions:
yield from [function() for _ in range(kwargs.get(function.__name__, 1))]
await asyncio.gather(*_tasks())
loop = asyncio.get_event_loop()
loop.run_until_complete(run(func_2=2)) # Run only the func_2 twice
loop.close()
Edit
Interesting. If my func_1, func_2, and func_3 all have different set arguments inside them, how would I pass them in your solution?
If you hold the coroutines of the async functions in the functions tuple, you will not get the expected result. You can have an additional class that will handle it. It takes all the necessary arguments of a particular function and returns its coroutine on performing a call on its object.
class Coroutine:
def __init__(self, function, *args, **kwargs):
self._function = function
self._args = args
self._kwargs = kwargs
self.__name__ = function.__name__
async def __call__(self):
await self._function(*self._args, **self._kwargs)
Also, you will need to change the tuple of functions as follows.
functions = (
Coroutine(func_1, "arg"),
Coroutine(func_2, "arg1", kw1="kw"),
Coroutine(func_3),
)

Related

Python Mockito: How do I set up async mocks?

The following code works as expected for the synchronous part but gives me a TypeError for the async call (TypeError: object NoneType can't be used in 'await' expression), presumably because the mock constructor can't properly deal with the spec. How do I properly tell Mockito that it needs to set up an asynchronous mock for async_method ?
class MockedClass():
def sync_method(self):
pass
async def async_method(self):
pass
class TestedClass():
def do_something_sync(self, mocked: MockedClass):
mocked.sync_method()
async def do_something_async(self, mocked: MockedClass):
await mocked.async_method()
#pytest.mark.asyncio
async def test():
my_mock = mock(spec=MockedClass)
tested_class = TestedClass()
tested_class.do_something_sync(my_mock)
verify(my_mock).sync_method()
await tested_class.do_something_async(my_mock) # <- Fails here
verify(my_mock).async_method()
Edit:
For reference, this is how it works with the standard mocks (the behavior that I expect):
In mockito, my_mock.async_method() does not return anything useful by default and without further configuration. (That is, it returns None, which is not awaitable.)
What I did in the past:
# a helper function
def future(value=None):
f = asyncio.Future()
f.set_result(value)
return f
# your code
#pytest.mark.asyncio
async def test():
my_mock = mock(spec=MockedClass)
when(my_mock).async_method().thenReturn(future(None)) # fill in whatever you expect the method to return
# ....

Inner wrapper function never called

I'm currently working on a small Telegram bot library that uses PTB under the hood.
One of the syntactic sugar features is this decorator function I use, to ensure code locality.
Unfortunately, the inner wrapper is never called (in my current library version, I have an additional wrapper called @aexec that I place below @async_command, but that's not ideal)
def async_command(name: str):
def decorator(func):
updater = PBot.updater
updater.dispatcher.add_handler(CommandHandler(name, func, run_async=True))
def wrapper(update: Update, context: CallbackContext):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(func(update, context))
loop.close()
return wrapper
return decorator
#async_command(name="start")
async def start_command(update: Update, context: CallbackContext) -> None:
update.message.reply_text(text="Hello World")
"def wrapper" inside "async_command" is never called, so the function is never executed.
Any idea how to achieve this without needing an additional decorator to start a new asyncio event loop?
Note: PBot is just a simple class that contains one static "updater" that can be re-used anywhere in the code (PTB uses "updater" instances, which is not ideal for my use cases)
EDIT: I notice that the issue of the inner wrapper not being called only happens on async function. Is this a case of differing calling conventions?
I actually figured it out myself.
I took the existing @aexec and chained the two functions internally, creating a new decorator.
def aexec(func):
def wrapper(update: Update, context: CallbackContext):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(func(update, context))
loop.close()
return wrapper
def _async_command(name: str):
def wrapper(func):
updater = PBot.updater
updater.dispatcher.add_handler(CommandHandler(name, func, run_async=True))
return wrapper
def async_command(name: str):
run_first = _async_command(name)
def wrapper(func):
return run_first(aexec(func))
return wrapper
Now I can use @async_command(name="name_of_command") and it will first call _async_command, then aexec

Decorators: make actions after function is awaited

I use Python 3.7 and have following decorator:
def decorator(success_check: function):
def wrap(func):
async def inner(root, info, **args):
func_result = await func(root, info, **args)
if not success_check(func_result):
pass # do some action
return func(root, info, **args)
return inner
return wrap
In a current implementation func is awaited two times. Can I make it work with func awaited once?
If you return await func(root, info, **args) or, even better, just return func_result, it will most likely solve your issue.

call async function in main function

I would like to know if it's possible to call an async function def get_all_allowed_systems in create_app function so that I have access to the database entries of ALLOWED_SYSTEMS populated by get_all_allowed_systems call. I have a limitation that I can't make create_app as async function.
async def get_all_allowed_systems(app):
global ALLOWED_SYSTEMS
operation = prepare_exec(app.config.get_all_systems_procedure)
ALLOWED_SYSTEMS = (await app['database'].execute(operation)).all()
def create_app():
app = App(config=Config)
app['database'] = AioDatabase(**app.config.dict('db_'))
app['app_database'] = AioDatabase(app.config.app_db_url)
get_all_allowed_systems(app)
print(ALLOWED_SYSTEMS)
In Python 3.7+ you can just use asyncio.run(coroutine())
In earlier versions you have to get the event loop and run from there:
loop = asyncio.get_event_loop()
asyncio.ensure_future(coroutine())
loop.run_forever()
loop.close()

How to timeout an async test in pytest with fixture?

I am testing an async function that might get deadlocked. I tried to add a fixture to limit the function to only run for 5 seconds before raising a failure, but it hasn't worked so far.
Setup:
pipenv --python==3.6
pipenv install pytest==4.4.1
pipenv install pytest-asyncio==0.10.0
Code:
import asyncio
import pytest
#pytest.fixture
def my_fixture():
# attempt to start a timer that will stop the test somehow
asyncio.ensure_future(time_limit())
yield 'eggs'
async def time_limit():
await asyncio.sleep(5)
print('time limit reached') # this isn't printed
raise AssertionError
#pytest.mark.asyncio
async def test(my_fixture):
assert my_fixture == 'eggs'
await asyncio.sleep(10)
print('this should not print') # this is printed
assert 0
--
Edit: Mikhail's solution works fine. I can't find a way to incorporate it into a fixture, though.
Convenient way to limit function (or block of code) with timeout is to use async-timeout module. You can use it inside your test function or, for example, create a decorator. Unlike with fixture it'll allow to specify concrete time for each test:
import asyncio
import pytest
from async_timeout import timeout
def with_timeout(t):
def wrapper(corofunc):
async def run(*args, **kwargs):
with timeout(t):
return await corofunc(*args, **kwargs)
return run
return wrapper
#pytest.mark.asyncio
#with_timeout(2)
async def test_sleep_1():
await asyncio.sleep(1)
assert 1 == 1
#pytest.mark.asyncio
#with_timeout(2)
async def test_sleep_3():
await asyncio.sleep(3)
assert 1 == 1
It's not hard to create decorator for concrete time (with_timeout_5 = partial(with_timeout, 5)).
I don't know how to create a fixture (if you really need a fixture), but the code above can provide a starting point. Also not sure if there's a common way to achieve the goal better.
There is a way to use fixtures for timeout, one just needs to add the following hook into conftest.py.
Any fixture prefixed with timeout must return a number of seconds(int, float) the test can run.
The closest fixture w.r.t scope is chosen. autouse fixtures have lesser priority than explicitly chosen ones. Later one is preferred. Unfortunately order in the function argument list does NOT matter.
If there is no such fixture, the test is not restricted and will run indefinitely as usual.
The test must be marked with pytest.mark.asyncio too, but that is needed anyway.
# Add to conftest.py
import asyncio
import pytest
_TIMEOUT_FIXTURE_PREFIX = "timeout"
#pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_setup(item: pytest.Item):
"""Wrap all tests marked with pytest.mark.asyncio with their specified timeout.
Must run as early as possible.
Parameters
----------
item : pytest.Item
Test to wrap
"""
yield
orig_obj = item.obj
timeouts = [n for n in item.funcargs if n.startswith(_TIMEOUT_FIXTURE_PREFIX)]
# Picks the closest timeout fixture if there are multiple
tname = None if len(timeouts) == 0 else timeouts[-1]
# Only pick marked functions
if item.get_closest_marker("asyncio") is not None and tname is not None:
async def new_obj(*args, **kwargs):
"""Timed wrapper around the test function."""
try:
return await asyncio.wait_for(
orig_obj(*args, **kwargs), timeout=item.funcargs[tname]
)
except Exception as e:
pytest.fail(f"Test {item.name} did not finish in time.")
item.obj = new_obj
Example:
#pytest.fixture
def timeout_2s():
return 2
#pytest.fixture(scope="module", autouse=True)
def timeout_5s():
# You can do whatever you need here, just return/yield a number
return 5
async def test_timeout_1():
# Uses timeout_5s fixture by default
await aio.sleep(0) # Passes
return 1
async def test_timeout_2(timeout_2s):
# Uses timeout_2s because it is closest
await aio.sleep(5) # Timeouts
WARNING
Might not work with some other plugins, I have only tested it with pytest-asyncio, it definitely won't work if item is redefined by some hook.
I just loved Quimby's approach of marking tests with timeouts. Here's my attempt to improve it, using pytest marks:
# tests/conftest.py
import asyncio
#pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_pyfunc_call(pyfuncitem: pytest.Function):
"""
Wrap all tests marked with pytest.mark.async_timeout with their specified timeout.
"""
orig_obj = pyfuncitem.obj
if marker := pyfuncitem.get_closest_marker("async_timeout"):
async def new_obj(*args, **kwargs):
"""Timed wrapper around the test function."""
try:
return await asyncio.wait_for(orig_obj(*args, **kwargs), timeout=marker.args[0])
except (asyncio.CancelledError, asyncio.TimeoutError):
pytest.fail(f"Test {pyfuncitem.name} did not finish in time.")
pyfuncitem.obj = new_obj
yield
def pytest_configure(config: pytest.Config):
config.addinivalue_line("markers", "async_timeout(timeout): cancels the test execution after the specified amount of seconds")
Usage:
#pytest.mark.asyncio
#pytest.mark.async_timeout(10)
async def potentially_hanging_function():
await asyncio.sleep(20)
It should not be hard to include this to the asyncio mark on pytest-asyncio, so we can get a syntax like:
#pytest.mark.asyncio(timeout=10)
async def potentially_hanging_function():
await asyncio.sleep(20)
EDIT: looks like there's already a PR for that.

Categories