Scheduled HTTP Request using FastAPI

Inside my FastAPI application, I would like to schedule an HTTP request to be made every X time interval to check for new results (comparing against the database). What would be the easiest way to accomplish this using httpx?

You can add an async task to the event loop during the startup event. This async task would check (and sleep) and store the result somewhere. In the example below, I've chosen to pass around a shared object using the app.state feature of FastAPI. This should give you enough pointers to implement your exact use case. I have commented out an example of making the request with httpx specifically.
from fastapi import FastAPI
import asyncio


class MySharedObject:
    def __init__(self) -> None:
        self.count = 0


async def timed_checker(obj: MySharedObject):
    while True:
        obj.count += 1
        # async with httpx.AsyncClient() as client:
        #     r = await client.get('https://www.example.com/')
        await asyncio.sleep(3)


app = FastAPI()


@app.on_event("startup")
def startup_function():
    app.state.shared_object = MySharedObject()
    asyncio.create_task(timed_checker(app.state.shared_object))


@app.get("/")
async def root():
    return {"hello": "world"}


@app.get("/count")
async def get_count():
    return app.state.shared_object.count


if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)
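If the background task should actually poll an external service with httpx, as the question asks, the loop body could look roughly like the sketch below. This is a minimal sketch that drops into the example above, not part of the original answer: CHECK_URL is a placeholder, and the comparison against your database is left as a comment where your own logic would go.

import asyncio
import httpx

CHECK_URL = "https://www.example.com/results"  # placeholder URL, replace with your own


async def timed_checker(obj: MySharedObject, interval: float = 3.0):
    async with httpx.AsyncClient() as client:
        while True:
            try:
                r = await client.get(CHECK_URL)
                r.raise_for_status()
                # Compare r.json() against what is already stored in your
                # database here; this sketch only counts successful polls.
                obj.count += 1
            except httpx.HTTPError:
                pass  # don't let a single failed poll kill the background task
            await asyncio.sleep(interval)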


Alternative to asyncio.gather which I can keep adding coroutines to at runtime?

I need to be able to keep adding coroutines to the asyncio loop at runtime. I tried using create_task() thinking that this would do what I want, but it still needs to be awaited.
This is the code I had; I'm not sure if there is a simple edit to make it work.
import asyncio
import time

import httpx


async def get_value_from_api():
    global ASYNC_CLIENT
    return ASYNC_CLIENT.get(api_address)


async def print_subs():
    count = await get_value_from_api()
    print(count)


async def save_subs_loop():
    while True:
        asyncio.create_task(print_subs())
        time.sleep(0.1)


async def start():
    global ASYNC_CLIENT
    async with httpx.AsyncClient() as ASYNC_CLIENT:
        await save_subs_loop()


asyncio.run(start())
I once created a similar pattern when I was mixing trio and kivy, as a demonstration of running multiple coroutines asynchronously.
It used a trio.MemoryChannel, which is roughly equivalent to asyncio.Queue; I'll just refer to it as a queue here.
The main idea is:
Wrap each task in a class that has a run method.
Give the class an async method that puts the object itself back into the queue when execution is done.
Create a global task-spawning loop that waits for objects in the queue and schedules execution (creates a task) for each one.
import asyncio
import traceback

import httpx


async def task_1(client: httpx.AsyncClient):
    resp = await client.get("http://127.0.0.1:5000/")
    print(resp.read())
    await asyncio.sleep(0.1)  # without this we would get an IP ban


async def task_2(client: httpx.AsyncClient):
    resp = await client.get("http://127.0.0.1:5000/meow/")
    print(resp.read())
    await asyncio.sleep(0.5)


class CoroutineWrapper:
    def __init__(self, queue: asyncio.Queue, coro_func, *param):
        self.func = coro_func
        self.param = param
        self.queue = queue

    async def run(self):
        try:
            await self.func(*self.param)
        except Exception:
            traceback.print_exc()
            return

        # put itself back into the queue
        await self.queue.put(self)


class KeepRunning:
    def __init__(self):
        # queue for gathering CoroutineWrapper objects
        self.queue = asyncio.Queue()

    def add_task(self, coro, *param):
        wrapped = CoroutineWrapper(self.queue, coro, *param)

        # add the task to be executed to the queue
        self.queue.put_nowait(wrapped)

    async def task_processor(self):
        task: CoroutineWrapper
        while task := await self.queue.get():
            # wait for a new CoroutineWrapper object, then schedule its async run method
            asyncio.create_task(task.run())


async def main():
    keep_running = KeepRunning()
    async with httpx.AsyncClient() as client:
        keep_running.add_task(task_1, client)
        keep_running.add_task(task_2, client)
        await keep_running.task_processor()


asyncio.run(main())
Server:
import time

from flask import Flask

app = Flask(__name__)


@app.route("/")
def hello():
    return str(time.time())


@app.route("/meow/")
def meow():
    return "meow"


app.run()
Output:
b'meow'
b'1639920445.965701'
b'1639920446.0767004'
b'1639920446.1887035'
b'1639920446.2986999'
b'1639920446.4067013'
b'meow'
b'1639920446.516704'
b'1639920446.6267014'
...
You can see the tasks running repeatedly at their own pace.
Old answer
It seems like you only want to cycle through a fixed set of tasks.
In that case, just iterate over a list of coroutines with itertools.cycle.
But this is no different from doing it synchronously, so let me know if you need it to be asynchronous.
import asyncio
import itertools

import httpx


async def main_task(client: httpx.AsyncClient):
    resp = await client.get("http://127.0.0.1:5000/")
    print(resp.read())
    await asyncio.sleep(0.1)  # without this we would get an IP ban


async def main():
    async with httpx.AsyncClient() as client:
        for coroutine in itertools.cycle([main_task]):
            await coroutine(client)


asyncio.run(main())
Server:
import time

from flask import Flask

app = Flask(__name__)


@app.route("/")
def hello():
    return str(time.time())


app.run()
Output:
b'1639918937.7694323'
b'1639918937.8804302'
b'1639918937.9914327'
b'1639918938.1014295'
b'1639918938.2124324'
b'1639918938.3204308'
...
asyncio.create_task() works as you describe it. The problem you are having is that you create an infinite loop here:
async def save_subs_loop():
    while True:
        asyncio.create_task(print_subs())
        time.sleep(0.1)  # do not use time.sleep() in async code EVER
save_subs_loop() keeps creating tasks but control is never yielded back to the event loop, because there is no await in there. Try
async def save_subs_loop():
    while True:
        asyncio.create_task(print_subs())
        await asyncio.sleep(0.1)  # yield control back to the loop to give tasks a chance to actually run
This problem is so common I'm thinking python should raise a RuntimeError if it detects time.sleep() within a coroutine :-)
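One extra detail, not mentioned in the answer above: the asyncio documentation recommends keeping a reference to each task returned by create_task(), because the event loop only holds a weak reference and a pending task can otherwise be garbage-collected before it finishes. A minimal sketch of the corrected loop with that added:

import asyncio

background_tasks = set()


async def save_subs_loop():
    while True:
        task = asyncio.create_task(print_subs())
        # keep a strong reference so the pending task is not garbage-collected,
        # and drop it automatically once the task completes
        background_tasks.add(task)
        task.add_done_callback(background_tasks.discard)
        await asyncio.sleep(0.1)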
You might want to try the TaskThread framework:
It allows you to add tasks at runtime.
Tasks are re-scheduled periodically (like in your while loop up there).
There is a consumer/producer framework built in (parent/child relationships), which you seem to need.
Disclaimer: I wrote TaskThread out of necessity and it's been a life saver.

Use anyio.TaskGroup with fastapi.StreamingResponse

anyio is a part of starlette and, therefore, of FastAPI. I find it quite convenient to use its task groups to perform concurrent requests to external services from one of my API servers.
Also, I would like to stream out the results as soon as they are ready. fastapi.StreamingResponse could do the trick, but I need to be able to keep the task group up and running after returning the StreamingResponse, which sounds like something that goes against the idea of structured concurrency.
Using an asynchronous generator may look like an obvious solution, but yield generally cannot be used inside a task group, according to this: https://trio.readthedocs.io/en/stable/reference-core.html#cancel-scopes-and-nurseries
There is an example of a FastAPI server that seems to work, though it aggregates the responses before returning them:
import anyio
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()


@app.get("/")
async def root():
    # What to put below?
    result = await main()
    return StreamingResponse(iter(result))


async def main():
    send_stream, receive_stream = anyio.create_memory_object_stream()
    result = []
    async with anyio.create_task_group() as tg:
        async with send_stream:
            for num in range(5):
                tg.start_soon(sometask, num, send_stream.clone())
        async with receive_stream:
            async for entry in receive_stream:
                # What to do here???
                result.append(entry)
    return result


async def sometask(num, send_stream):
    await anyio.sleep(1)
    async with send_stream:
        await send_stream.send(f'number {num}\n')


if __name__ == "__main__":
    import uvicorn

    # Debug-only configuration
    uvicorn.run(app)
So, the question is: is there something similar to @trio_util.trio_async_generator in anyio, or is it possible to use @trio_util.trio_async_generator with FastAPI directly?
Maybe there are other solutions?
import anyio
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()


@app.get("/")
async def root():
    return StreamingResponse(main())


async def main():
    send_stream, receive_stream = anyio.create_memory_object_stream()
    async with anyio.create_task_group() as tg:
        async with send_stream:
            for num in range(5):
                tg.start_soon(sometask, num, send_stream.clone())
        async with receive_stream:
            async for entry in receive_stream:
                yield entry


async def sometask(num, send_stream):
    async with send_stream:
        for i in range(1000):
            await anyio.sleep(1)
            await send_stream.send(f"number {num}\n")


if __name__ == "__main__":
    import uvicorn

    # Debug-only configuration
    uvicorn.run(app)
Unexpectedly, it works.
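For completeness, here is a small client sketch (not from the original answer) to observe the streaming behaviour, assuming the app above is served by uvicorn on its default port 8000:

import httpx

with httpx.Client(timeout=None) as client:
    with client.stream("GET", "http://127.0.0.1:8000/") as response:
        for line in response.iter_lines():
            print(line)  # "number 0", "number 1", ... as each task sends an entry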

Expose discord bot to API (Flask, FASTAPI)

I'm building a discord bot to take commands from multiple systems and programs. I want to expose certain actions of my discord bot as REST endpoints and then execute those actions in one spot.
import uvicorn
from fastapi import FastAPI
from pydantic import BaseModel
from typing import Optional
from discord.ext import commands

app = FastAPI()

TOKEN = 'MY_TOKEN'
bot = commands.Bot(command_prefix='>')


class Item(BaseModel):
    name: str
    description: Optional[str] = None
    price: float
    tax: Optional[float] = None


@app.get("/")
def hello():
    return {"message": "Hello"}


@app.post("/items/")
async def create_item(item: Item):
    await send_message()
    return item


@bot.event
async def on_ready():
    print(f'{bot.user.name} has connected to Discord!')


async def send_message():
    user = await bot.fetch_user(USER_ID)
    await user.send('👀')


if __name__ == "__main__":
    bot.run('BOT_TOKEN')
    uvicorn.run(app, host='0.0.0.0')
When I try to run this, I only see the bot active. I'm a little newer to Python but a veteran programmer. Is this due to Python's "lack" of multithreading? Or port usage?
The end goal is to call the "/items/" endpoint and see a message on Discord sent to me.
EDIT
I tried all the answers and came up with some approaches of my own. The problem is multi-threading. I got frustrated with it and ended up just moving this piece to Node.js. It doesn't technically fulfill this question, but it was far easier than navigating Python multithreading.
server.js:
var express = require('express');
var app = express();
const Discord = require('discord.js');
const client = new Discord.Client();

app.get('/listUsers', function (req, res) {
    dm_user();
    res.send('hello');
})

client.on('ready', () => {
    console.log(`Logged in as ${client.user.tag}!`);
});

client.on('message', msg => {
    if (msg.content === 'ping') {
        msg.reply('pong');
    }
});

async function dm_user(id) {
    var my_user = await client.users.fetch('USER_ID');
    console.log(my_user);
}

var server = app.listen(8081, function () {
    var host = server.address().address
    var port = server.address().port
    console.log("Example app listening at http://%s:%s", host, port)
    client.login('TOKEN');
})
According to the discord.py docs, bot.run() is "a blocking call that abstracts away the event loop initialisation from you", and they further say that if we want more control over the event loop we can use the start() coroutine instead of run(). So we should create a task that calls this coroutine; discord.py and FastAPI are both asynchronous applications. To start a FastAPI app you need an ASGI server to handle it, in this case Uvicorn. That takes care of running the FastAPI app; now we need to start the discord bot. According to the FastAPI docs we can use the startup/shutdown events to call the bot.start() coroutine before the main API starts.
Here is an example of an app which has an API endpoint for sending a message to a discord's user:
import asyncio

import discord
import uvicorn
from config import TOKEN, USER_ID
from fastapi import FastAPI

app = FastAPI()
bot = discord.Client()


@app.on_event("startup")
async def startup_event():  # this function will run before the main API starts
    asyncio.create_task(bot.start(TOKEN))
    await asyncio.sleep(4)  # optional sleep to let the connection with discord establish
    print(f"{bot.user} has connected to Discord!")


@app.get("/")
async def root(msg: str):  # API endpoint for sending a message to a discord user
    user = await send_message(msg)
    return {"Message": f"'{msg}' sent to {user}"}


async def send_message(message):
    user = await bot.fetch_user(USER_ID)
    await user.send(message)
    return user  # for an optional log in the endpoint response


if __name__ == "__main__":
    uvicorn.run(app, host="localhost", port=5000)
Tested with Python 3.7.4
You are not returning anything from your send_message function. Something like this should work:
@app.post("/items/")
async def create_item(item: Item):
    msg = await send_message()
    return msg


async def send_message():
    user = await bot.fetch_user(USER_ID)
    return await user.send('👀')
bot.run(...) runs all the time and blocks the next line, which would start the API. You would have to run one of them in a separate thread or process.
I tried to run the bot in a thread:
if __name__ == "__main__":
    import threading

    print('Starting bot')
    t = threading.Thread(target=bot.start, args=(TOKEN,))
    t.start()

    print('Starting API')
    uvicorn.run(app, host='0.0.0.0')
but it gave me a message that the bot should run in the main thread.
But I found the question Discord bot and bottle in the same time in Python and, based on it, I created code which works for me:
if __name__ == "__main__":
    import asyncio

    print('Starting bot')
    bot_app = bot.start(TOKEN)
    bot_task = asyncio.ensure_future(bot_app)

    print('Starting API')
    uvicorn.run(app, host='0.0.0.0')
But I'm not sure if this is an elegant method, because uvicorn runs asyncio indirectly.
Full version
import uvicorn
from fastapi import FastAPI
from pydantic import BaseModel
from typing import Optional
from discord.ext import commands

app = FastAPI()

#import os
#TOKEN = os.getenv("DISCORD_TOKEN")
TOKEN = 'MY_TOKEN'

bot = commands.Bot(command_prefix='>')


class Item(BaseModel):
    name: str
    description: Optional[str] = None
    price: float
    tax: Optional[float] = None


@app.get("/")
def hello():
    return {"message": "Hello"}


@app.post("/items/")
async def create_item(item: Item):
    await send_message()
    return item


@bot.event
async def on_ready():
    print(f'{bot.user.name} has connected to Discord!')


async def send_message():
    user = await bot.fetch_user(USER_ID)
    await user.send('👀')


if __name__ == "__main__":
    import asyncio

    print('Starting bot')
    bot_app = bot.start(TOKEN)
    bot_task = asyncio.ensure_future(bot_app)

    print('Starting API')
    uvicorn.run(app, host='0.0.0.0')
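Once either variant is running, the /items/ endpoint can be exercised with a small client like the sketch below (my addition, not part of the answer). The payload just has to satisfy the Item model, and the host/port assume uvicorn's defaults:

import httpx

item = {"name": "test", "description": "ping the bot", "price": 1.0}
r = httpx.post("http://127.0.0.1:8000/items/", json=item)
print(r.status_code, r.json())  # the bot should also DM the configured USER_ID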

FastApi communication with other Api

I started using FastAPI very recently, and as an exercise I want to connect my FastAPI API with a validation service on another server... but I do not know how to do this, and I have not found anything that helps in the official documentation. Will I have to do it with Python code? Or is there another way?
FastApi docs
Thank you for your help, and excuse my English.
The accepted answer certainly works, but it is not an efficient solution. With each request the ClientSession is closed, so we lose the advantages [0] of ClientSession: connection pooling, keep-alives, etc.
We can use the startup and shutdown events [1] in FastAPI, which are triggered when the server starts up and shuts down respectively. In these events it is possible to create a ClientSession instance and use it for the runtime of the whole application (and therefore utilize its full potential).
The ClientSession instance is stored in the application state. [2]
Here I answered a very similar question in the context of the aiohttp server: https://stackoverflow.com/a/60850857/752142
from __future__ import annotations

import asyncio
from typing import Final

from aiohttp import ClientSession
from fastapi import Depends, FastAPI
from starlette.requests import Request

app: Final = FastAPI()


@app.on_event("startup")
async def startup_event():
    setattr(app.state, "client_session", ClientSession(raise_for_status=True))


@app.on_event("shutdown")
async def shutdown_event():
    # close the shared session, waiting at most 5 seconds
    await asyncio.wait_for(app.state.client_session.close(), timeout=5.0)


def client_session_dep(request: Request) -> ClientSession:
    return request.app.state.client_session


@app.get("/")
async def root(
    client_session: ClientSession = Depends(client_session_dep),
) -> str:
    async with client_session.get(
        "https://example.com/", raise_for_status=True
    ) as the_response:
        return await the_response.text()

[0] https://docs.aiohttp.org/en/stable/client_reference.html
[1] https://fastapi.tiangolo.com/advanced/events/
[2] https://www.starlette.io/applications/#storing-state-on-the-app-instance
You will need to code it with Python.
If you're using async, you should use an HTTP client that is also async, for example aiohttp:
import aiohttp


@app.get("/")
async def slow_route():
    async with aiohttp.ClientSession() as session:
        async with session.get("http://validation_service.com") as resp:
            data = await resp.text()
            # do something with data

How to call asyncio functions inside flask application in python

I have a Flask application that receives a request and tries to take a screenshot of the given URL, which is done with an asyncio function.
What I have done is:
import asyncio
from pyppeteer import launch
from flask import Flask
import base64
from flask import Blueprint, jsonify, request
import jwt


async def main(target):
    browser = await launch(headless=True)
    page = await browser.newPage()
    await page.goto(target)
    await page.screenshot({'path': '/tmp/screen.png', 'fullPage': True})
    await browser.close()


app = Flask(__name__)


@app.route('/heatMapDbConfigSave', methods=['POST'])
def notify():
    token, target, id = map(
        request.form.get, ('token', 'target', 'id'))
    asyncio.get_event_loop().run_until_complete(main(target))


if __name__ == '__main__':
    app.run(host='localhost', port=5002, debug=True)
The problem I am facing is the error RuntimeError: There is no current event loop in thread 'Thread-2'. I have googled and gone through previous posts; none helped or pointed to a clear solution.
What is the solution to this?
Thanks in advance!
You can try something like the below:
import asyncio

from flask import Blueprint

health_check = Blueprint('health_check', __name__)


async def first():
    await asyncio.sleep(20)
    return 'first'


async def second():
    await asyncio.sleep(10)
    return 'second'


async def third():
    await asyncio.sleep(10)
    return 'third'


def ordinary_generator():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    for future in asyncio.as_completed([first(), second(), third()]):
        print('reached')
        yield loop.run_until_complete(future)


@health_check.route('', methods=['GET'])
def healthcheck():
    """
    Retrieves the health of the service.
    """
    for element in ordinary_generator():
        print(element)
    return "Health check passed"
Blueprints are not required; I just used one here. You have to register the blueprint in the main app file like below:
app = Flask(__name__)
app.register_blueprint(health_check, url_prefix='/api/v1/healthcheck')

if __name__ == '__main__':
    app.run()
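As an alternative sketch (not what the answer above does): on Python 3.7+ you can also let asyncio.run() create and tear down a fresh event loop inside the synchronous Flask view, which avoids the "no current event loop in thread" error without managing loops by hand. Here take_screenshot is a placeholder for the pyppeteer logic from the question.

import asyncio

from flask import Flask, request

app = Flask(__name__)


async def take_screenshot(target: str) -> None:
    # placeholder for the pyppeteer logic from the question
    await asyncio.sleep(0)


@app.route('/heatMapDbConfigSave', methods=['POST'])
def notify():
    target = request.form.get('target')
    # asyncio.run() creates a new event loop in this worker thread,
    # runs the coroutine to completion, then closes the loop
    asyncio.run(take_screenshot(target))
    return 'ok'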
