My question is closely related to the following question on Stack Overflow and the documentation here.
I define a websocket connection as a class. Then I create a second class that stores an instance of that websocket class as self.ws and specifies which data to send to the websocket in self.request.
My problem is that the current script only runs once, whereas my desired output is continuous data.
The second link shows that I can retrieve continuous / streaming data using
asyncio.get_event_loop().run_until_complete(call_api(json.dumps(msg)))
I have incorporated all of the above in my code (call_api is defined differently because I want to write it as a class). Below is my code:
import sys, json
import asyncio
from websockets import connect

class EchoWebsocket:
    def __init__(self, URL, CLIENT_ID=None, CLIENT_SECRET=None):
        self.url = URL
        self.client_id = CLIENT_ID
        self.client_secret = CLIENT_SECRET

    async def __aenter__(self):
        self._conn = connect(self.url)
        self.websocket = await self._conn.__aenter__()
        return self

    async def __aexit__(self, *args, **kwargs):
        await self._conn.__aexit__(*args, **kwargs)

    async def send(self, message):
        await self.websocket.send(message)

    async def receive(self):
        return await self.websocket.recv()

class DERIBIT:
    def __init__(self):
        self.ws = EchoWebsocket(URL='wss://test.deribit.com/ws/api/v2')
        self.loop = asyncio.get_event_loop()
        self.request = \
            {"jsonrpc": "2.0",
             "method": "public/subscribe",
             "id": 42,
             "params": {
                 "channels": ["deribit_price_index.btc_usd"]}
             }

    def get_ticks(self):
        return self.loop.run_until_complete(self.__async__get_ticks())

    async def __async__get_ticks(self):
        async with self.ws as echo:
            await echo.send(json.dumps(self.request))
            response = await echo.receive()
            print(response)

if __name__ == "__main__":
    deribit = DERIBIT()
    deribit.get_ticks()
This script gives the following output:
{"jsonrpc": "2.0", "method": "public/subscribe", "id": 42, "params": {"channels": ["deribit_price_index.btc_usd"]}}
whereas I would like to see a continuous stream of price updates.
Please advise.
I have only worked with Tornado's websockets, but they work pretty well, and Tornado has many helpers for dealing with async code:
import json
import tornado
from tornado.ioloop import PeriodicCallback
from tornado.websocket import websocket_connect

class EchoWebsocket:
    def __init__(self, url, client_id=None, client_secret=None):
        self.url = url
        self.client_id = client_id
        self.client_secret = client_secret
        self.websocket = None

    async def connect(self):
        if not self.websocket:
            self.websocket = await websocket_connect(self.url)

    async def close(self):
        await self.websocket.close()
        self.websocket = None

    async def read(self):
        return await self.websocket.read_message()

    async def write(self, message):
        await self.websocket.write_message(message)

class DERIBIT:
    def __init__(self):
        self.ws = EchoWebsocket(url='wss://test.deribit.com/ws/api/v2')
        self.request = {
            "jsonrpc": "2.0",
            "method": "public/subscribe",
            "id": 42,
            "params": {
                "channels": ["deribit_price_index.btc_usd"]}
        }
        self.callback = PeriodicCallback(self.get_ticks, 1000)
        self.callback.start()

    async def get_ticks(self):
        if not self.ws.websocket:
            await self.ws.connect()
        await self.ws.write(json.dumps(self.request))
        response = await self.ws.read()
        print(response)

if __name__ == "__main__":
    deribit = DERIBIT()
    tornado.ioloop.IOLoop.current().start()
Output:
{"jsonrpc":"2.0","id":42,"result":["deribit_price_index.btc_usd"],"usIn":1587298852138977,"usOut":1587298852139023,"usDiff":46,"testnet":true}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587298851526,"price":7173.46,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587298852533,"price":7173.53,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","id":42,"result":["deribit_price_index.btc_usd"],"usIn":1587298852932540,"usOut":1587298852932580,"usDiff":40,"testnet":true}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587298852533,"price":7173.53,"index_name":"btc_usd"}}}
The example above could be simplified a lot if you integrate the websocket into the DERIBIT class rather than creating a separate class for it.
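For what it's worth, a rough, untested sketch of that simplification might look like the following; the class name and the subscribe-once-on-first-tick behaviour are my own choices, but the Tornado helpers are the same ones used above:

import json

import tornado
from tornado.ioloop import PeriodicCallback
from tornado.websocket import websocket_connect

class Deribit:
    def __init__(self, url='wss://test.deribit.com/ws/api/v2'):
        self.url = url
        self.websocket = None
        self.request = {
            "jsonrpc": "2.0",
            "method": "public/subscribe",
            "id": 42,
            "params": {"channels": ["deribit_price_index.btc_usd"]},
        }
        # Poll once a second, exactly like the version above.
        self.callback = PeriodicCallback(self.get_ticks, 1000)
        self.callback.start()

    async def get_ticks(self):
        if self.websocket is None:
            # Connect and subscribe on first use.
            self.websocket = await websocket_connect(self.url)
            await self.websocket.write_message(json.dumps(self.request))
        print(await self.websocket.read_message())

if __name__ == "__main__":
    Deribit()
    tornado.ioloop.IOLoop.current().start()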
The problem is in this function.
loop.run_until_complete runs only until the future is complete (see the docs for run_until_complete). That means your receive coroutine gets exactly one response; run_until_complete is not a callback that keeps firing.
So in your case, the main block does:
deribit.get_ticks() -> runs the coroutine __async__get_ticks once.
__async__get_ticks is a single task. Let's look at what that task does:
1. open the websocket connection
2. send the request
3. wait for one response from the websocket
4. print(response)
At that point the task is done, which is why you only see one line:
async def __async__get_ticks(self):
    async with self.ws as echo:
        await echo.send(json.dumps(self.request))
        response = await echo.receive()
        print(response)
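A tiny standalone example (unrelated to Deribit, the coroutine name is made up) shows the same behaviour: run_until_complete returns as soon as the awaited coroutine finishes, it does not keep re-running it.

import asyncio

async def one_response():
    return "only one message"

loop = asyncio.get_event_loop()
# Runs the coroutine exactly once and returns its result; then the call returns.
print(loop.run_until_complete(one_response()))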
With that explained, the solution is simple: wrap the receive call in a while loop:
async def __async__get_ticks(self):
    async with self.ws as echo:
        await echo.send(json.dumps(self.request))
        while True:
            response = await echo.receive()
            print(response)
Output:
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654476817,"price":7540.54,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654477824,"price":7540.52,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654478831,"price":7540.15,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654479838,"price":7539.83,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654480845,"price":7539.2,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654481852,"price":7538.96,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654482859,"price":7538.9,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654483866,"price":7538.89,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654484873,"price":7538.47,"index_name":"btc_usd"}}}
{"jsonrpc":"2.0","method":"subscription","params":{"channel":"deribit_price_index.btc_usd","data":{"timestamp":1587654485880,"price":7537.15,"index_name":"btc_usd"}}}
In Spring, setting the concurrency for a RabbitMQ consumer is easy, for example:
container.setConcurrentConsumers(consumerSize);
container.setMaxConcurrentConsumers(consumerMaxSize);
Is the same possible in Python? My Python code looks like:
async def handle_message(loop):
    connection = await connect(SETTINGS.cloudamqp_url, loop=loop)
    channel = await connection.channel()
    queue = await channel.declare_queue(SETTINGS.request_queue, durable=True)
    await queue.consume(on_message, no_ack=True)
I solved my problem by using threads. My code looks like:
import threading
from aio_pika import connect, IncomingMessage, Message
import json

class QueueWorker(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self.connection = None
        self.channel = None
        self.queue = None

    async def init(self, loop):
        self.connection = await connect(cloudamqp_url, loop=loop)
        self.channel = await self.connection.channel()
        await self.channel.set_qos(prefetch_count=1)
        self.queue = await self.channel.declare_queue(queue, durable=True)
        await self.queue.consume(self.callback, no_ack=False)

    async def callback(self, message: IncomingMessage):
        request = json.loads(message.body.decode("utf-8"))
        try:
            handle(request)
        except Exception as e:
            handleException...
        finally:
            await message.ack()
Consume the queue with concurrency:
async def listen_queue(loop):
    for _ in range(consumer_count):
        td = QueueWorker()
        await td.init(loop)
Note: inspired by Consuming rabbitmq queue from inside python threads.
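For completeness, a hypothetical entry point (not part of the original answer) that drives listen_queue and keeps the loop alive so the consumers keep working; it assumes consumer_count and the connection settings are defined as above:

import asyncio

loop = asyncio.get_event_loop()
# Create the consumers, then keep the loop running so messages keep arriving.
loop.run_until_complete(listen_queue(loop))
loop.run_forever()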
Good day!
I am trying to code a WebSocket connector using asyncio. I am not very familiar with asynchronous approaches, so I am running into incorrect behaviour. Below is a simplified version of the code.
import pandas as pd
import json
import websockets
import asyncio
import time

class BinanceQuotesWS:
    def __init__(self, client, pair):
        self.quotes = pd.DataFrame(columns=['Timestamp', 'Price'])
        self.pair = pair
        self.socket = 'wss://fstream.binance.com/ws'
        self.websocket = None
        self.loop = None
        self.result = None

    def get_quotes(self):
        return self.quotes

    def start(self):
        self.loop = asyncio.get_event_loop()
        self.result = self.loop.create_task(self.connect())

    async def connect(self):
        self.websocket = await websockets.connect(self.socket)
        await self.subscribe_quotes()

    async def subscribe_quotes(self):
        subscribe_message = {
            "method": "SUBSCRIBE",
            "params": [
                self.pair.lower() + "@trade"
            ],
            "id": 1
        }
        subscribe_message = json.dumps(subscribe_message)
        await self.websocket.send(subscribe_message)
        async for msg in self.websocket:
            msg = json.loads(msg)
            if 'p' in msg:
                self.quotes.loc[0] = [msg['E'], float(msg['p'])]

temp_ws = BinanceQuotesWS(client, 'BTCUSDT')
temp_ws.start()
When I test it in Jupyter and execute a cell with temp_ws.get_quotes() manually, the correct dataframe with fresh quotes is returned every single time.
However, in my program I need some kind of infinite loop, and that is where the problem shows up.
while True:
    quotes = temp_ws.get_quotes()
    print(quotes)
    time.sleep(3)
Here the quotes DF is always empty, but I can't work out why (probably because the while loop is blocking). I would be glad if someone could help me sort out the issue (and give some hints on anything else that could be improved in the code in terms of async requests). Thank you.
You could use asyncio.sleep to turn the display loop into an async function:
async def display(self):
    while True:
        await asyncio.sleep(3)
        quotes = self.get_quotes()
        print('time:', quotes['Timestamp'][0], 'price:', quotes['Price'][0])
and add it to the loop:
self.result2 = self.loop.create_task(self.display())
and then you can run everything in the same loop:
temp_ws.loop.run_forever()
If you don't call run_forever(), then connect() never actually runs, and you don't get any values in your plain while loop. The asyncio loop has to run all the time, and it can't run at the same time as your normal blocking loop (which also has to run all the time); one of them would have to run in a separate thread.
But await (with asyncio.sleep) solves the problem. When display() sleeps inside its while True, control goes back to the event loop, which can run other code; later, when that other code hits an await, control can come back to the while True.
In Jupyter it may work without run_forever() because Jupyter already runs an event loop behind the scenes (and parts of Jupyter need that loop to work correctly), but in a normal program you have to call run_forever() yourself.
Minimal working code:
import pandas as pd
import json
import websockets
import asyncio
import time

class BinanceQuotesWS:
    def __init__(self, client, pair):
        self.quotes = pd.DataFrame(columns=['Timestamp', 'Price'])
        self.pair = pair
        self.socket = 'wss://fstream.binance.com/ws'
        self.websocket = None
        self.loop = None
        self.result = None

    def get_quotes(self):
        return self.quotes

    def start(self):
        self.loop = asyncio.get_event_loop()
        self.result = self.loop.create_task(self.connect())
        self.result2 = self.loop.create_task(self.display())

    async def connect(self):
        self.websocket = await websockets.connect(self.socket)
        await self.subscribe_quotes()

    async def subscribe_quotes(self):
        subscribe_message = {
            "method": "SUBSCRIBE",
            "params": [
                self.pair.lower() + "@trade"
            ],
            "id": 1
        }
        subscribe_message = json.dumps(subscribe_message)
        await self.websocket.send(subscribe_message)
        async for msg in self.websocket:
            msg = json.loads(msg)
            if 'p' in msg:
                self.quotes.loc[0] = [msg['E'], float(msg['p'])]
                # print(self.quotes)

    async def display(self):
        while True:
            await asyncio.sleep(3)
            quotes = self.get_quotes()
            print('time:', quotes['Timestamp'][0], 'price:', quotes['Price'][0])

client = ''
temp_ws = BinanceQuotesWS(client, 'BTCUSDT')
temp_ws.start()
temp_ws.loop.run_forever()
What's wrong with my implementation of passing a class instance as a dependency to a FastAPI router, or is it a bug?
1) I have defined the router with a dependency:
app = FastAPI()

dbconnector_is = AsyncDBPool(conn=is_cnx, loop=None)

app.include_router(test_route.router, dependencies=[Depends(dbconnector_is)])

@app.on_event('startup')
async def startup():
    app.logger = await AsyncLogger().getlogger(log)
    await app.logger.warning('Webservice is starting up...')
    await app.logger.info("Establishing RDBS Integration Pool Connection...")
    await dbconnector_is.connect()

@app.on_event('startup')
async def startup():
    await app.logger.getlogger(log)
    await app.logger.warning('Webservice is starting up...')
    await dbconnector_is.connect()
The router:
@router.get('/test')
async def test():
    data = await dbconnector_is.callproc('is_processes_get', rows=-1, values=[None, None])
    return Response(json.dumps(data, default=str))
The custom class whose instance is passed as a callable:
class AsyncDBPool:
    def __init__(self, conn: dict, loop: None):
        self.conn = conn
        self.pool = None
        self.connected = False
        self.loop = loop

    def __call__(self):
        return self

    async def connect(self):
        while self.pool is None:
            try:
                self.pool = await aiomysql.create_pool(**self.conn, loop=self.loop, autocommit=True)
            except aiomysql.OperationalError as e:
                await asyncio.sleep(1)
                continue
            else:
                return self.pool
And when I send a request, I receive this error:
data = await dbconnector_is.callproc('is_processes_get', rows=-1, values=[None, None])
NameError: name 'dbconnector_is' is not defined
I want to mock the json() coroutine from the aiohttp.ClientSession.get method. It seems to return an async generator object, which is where I'm confused about how to mock it in my example. Here is my code:
async def get_access_token():
    async with aiohttp.ClientSession(auth=auth_credentials) as client:
        async with client.get(auth_path, params={'grant_type': 'client_credentials'}) as auth_response:
            assert auth_response.status == 200
            auth_json = await auth_response.json()
            return auth_json['access_token']
This is my test case to mock the get method:
json_data = [{
    'access_token': 'HSG9hsf328bJSWO82sl',
    'expires_in': 86399,
    'token_type': 'bearer'
}]

class AsyncMock:
    async def __aenter__(self):
        return self

    async def __aexit__(self, *error_info):
        return self

@pytest.mark.asyncio
async def test_wow_api_invalid_credentials(monkeypatch, mocker):
    def mock_client_get(self, auth_path, params):
        mock_response = AsyncMock()
        mock_response.status = 200
        mock_response.json = mocker.MagicMock(return_value=json_data)
        return mock_response

    monkeypatch.setattr('wow.aiohttp.ClientSession.get', mock_client_get)

    result = await wow.get_access_token()
    assert result == 'HSG9hsf328bJSWO82sl'
I think the problem might be that mock_response.json() is not awaitable. In my example I can't call await from a non-async function, so I'm confused about how I would do that. I would like to keep the test libraries to a minimum (just pytest and pytest-asyncio) for the learning experience and to rely less on third-party libraries.
I was making it more complicated than it needed to be. I simply defined json as an async method of AsyncMock that returns the json_data. The complete code looks like this:
json_data = {
    'access_token': 'HSG9hsf328bJSWO82sl',
    'expires_in': 86399,
    'token_type': 'bearer'
}

class AsyncMock:
    async def __aenter__(self):
        return self

    async def __aexit__(self, *error_info):
        return self

    async def json(self):
        return json_data

@pytest.mark.asyncio
async def test_wow_api_invalid_credentials(monkeypatch):
    def mock_client_get(self, auth_path, params):
        mock_response = AsyncMock()
        mock_response.status = 200
        return mock_response

    monkeypatch.setattr('wow.aiohttp.ClientSession.get', mock_client_get)

    result = await wow.get_access_token()
    assert result == 'HSG9hsf328bJSWO82sl'
This is part 1, but I suggest you also read part 2.
I'm not sure I understand your question completely, because using async def (or @asyncio.coroutine) lets you do this. I actually wanted to write this as a comment, but there is too much to fit into one.
import asyncio

json_ = [{
    'access_token': 'HSG9hsf328bJSWO82sl',
    'expires_in': 86399,
    'token_type': 'bearer'
}]

async def response_from_sun():
    return json_

class AsyncMock:
    async def specify(self):
        return self.json[0].get("access_token")

    async def __aenter__(self):
        return self

    async def __aexit__(self, *error_info):
        return self

async def mock_client_get():
    mock_response = AsyncMock()
    mock_response.status = 200
    mock_response.json = await response_from_sun()
    return mock_response

async def go():
    resp = await mock_client_get()
    result = await resp.specify()
    assert result == 'HSG9hsf328bJSWO82sl'

asyncio.get_event_loop().run_until_complete(go())
PART 2
After posting my answer, I found there is a problem with the content of your mock_response, because mock_response does not have the attributes and methods that a real ClientResponse has.
Edit: after several attempts and a look at ClientSession's code, I found that you can specify a new response class through its response_class parameter. Note: connector=aiohttp.TCPConnector(verify_ssl=False) is unnecessary.
import asyncio
import aiohttp

class Mock(aiohttp.ClientResponse):
    print("Mock")

    async def specify(self):
        json_ = (await self.json()).get("hello")
        return json_

async def go():
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False), response_class=Mock) as session:
        resp = await session.get("https://www.mocky.io/v2/5185415ba171ea3a00704eed")
        result = await resp.specify()
        print(result)
        assert result == 'world'

asyncio.get_event_loop().run_until_complete(go())
The toy script shows an application using a class that depends on an implementation that is not asyncio-aware, and it obviously doesn't work.
How would the fetch method of MyFetcher be implemented, using the asyncio-aware client, while still maintaining the contract with the _internal_validator method of FetcherApp? To be very clear, FetcherApp and AbstractFetcher cannot be modified.
To use the async fetch_data function inside fetch, both fetch and is_fetched_data_valid have to be async as well. You can change them in child classes without modifying the parents:
import asyncio

class AsyncFetcherApp(FetcherApp):
    async def is_fetched_data_valid(self):  # async here
        data = await self.fetcher_implementation.fetch()  # await here
        return self._internal_validator(data)

class AsyncMyFetcher(AbstractFetcher):
    def __init__(self, client):
        super().__init__()
        self.client = client

    async def fetch(self):  # async here
        result = await self.client.fetch_data()  # await here
        return result

class AsyncClient:
    async def fetch_data(self):
        await asyncio.sleep(1)  # just to be sure it works
        return 1

async def main():
    async_client = AsyncClient()
    my_fetcher = AsyncMyFetcher(async_client)
    fetcherApp = AsyncFetcherApp(my_fetcher)
    # ...
    is_valid = await fetcherApp.is_fetched_data_valid()  # await here
    print(repr(is_valid))

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
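As a side note, on Python 3.7 and newer the same entry point can also be written with asyncio.run, which creates and closes the event loop for you:

if __name__ == "__main__":
    # Equivalent to creating a loop and calling run_until_complete(main()).
    asyncio.run(main())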