Best way to handle 2 websocket connections at the same time - Python

I am handling data from 2 websocket servers and I would like to know the fastest way to handle both connections at the same time, given that the first connection sends data every 0.1-10 ms.
What I am doing so far is:
import asyncio
import json
import websockets

async def run():
    async with websockets.connect("ws://localhost:8546/") as ws1:
        async with websockets.connect(uri="wss://api.blxrbdn.com/ws",
                                      extra_headers={"Authorization": "apikey"}) as ws2:
            sub1 = await ws1.send("subscription 1")
            sub2 = await ws2.send("subscription 2")
            while True:
                try:
                    msg1 = await ws1.recv()
                    msg1 = json.loads(msg1)
                    msg2 = await ws2.recv()
                    msg2 = json.loads(msg2)
                    # process msg1 & msg2
                except Exception as e:
                    print(e, flush=True)

asyncio.run(run())

As stated in the comments, try to handle each connection in its own coroutine. Here is a small example:
import asyncio
import websockets

async def worker(ws, msg, t):
    while True:
        sub = await ws.send(msg)
        print("Received from the server:", await ws.recv())
        await asyncio.sleep(t)

async def run():
    url1 = "ws://localhost:8765/"
    url2 = "ws://something_different:8765/"
    async with websockets.connect(url1) as ws1, websockets.connect(url2) as ws2:
        await asyncio.gather(worker(ws1, "sub1", 1), worker(ws2, "sub2", 2))

asyncio.run(run())
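Applied back to the question, each connection could get its own worker that subscribes once and then only receives, so the fast feed is never blocked waiting on the slow one. A rough sketch along those lines (the URLs, header and subscription strings are taken from the question; process is a placeholder):
import asyncio
import json
import websockets

async def worker(uri, subscription, process, **connect_kwargs):
    # One coroutine per connection: subscribe once, then receive in a tight loop.
    async with websockets.connect(uri, **connect_kwargs) as ws:
        await ws.send(subscription)
        while True:
            msg = json.loads(await ws.recv())
            process(msg)  # keep this fast, or hand the message off to a queue

async def run():
    await asyncio.gather(
        worker("ws://localhost:8546/", "subscription 1", print),
        worker("wss://api.blxrbdn.com/ws", "subscription 2", print,
               extra_headers={"Authorization": "apikey"}),
    )

asyncio.run(run())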

Related

How can I use async in Telethon?

Please help me with async and threads in the telethon module. I'm trying to make a bot that, when a certain command is placed in the queue, will either send a file or send a message, but I ran into difficulty.
I'm trying to run Telethon's async code inside a thread, but the clock function does not start, although the handler function does work with the event.
import asyncio
import threading
from queue import Queue
from telethon import TelegramClient, events, sync

async def send_message():
    loop = asyncio.new_event_loop()
    api_id = 11
    api_hash = '11'
    try:
        client = TelegramClient('session', api_id, api_hash, loop=loop)
        await client.connect()
        if not await client.is_user_authorized():
            client.disconnected()
        await client.start()
    except:
        print('qqqqq')

    @client.on(events.NewMessage(chats='me'))
    async def handler(event):
        await client.forward_messages('me', event.message)

    async def clock():
        global cl
        while True:
            while (queue_send_al.qsize() == 0):
                print("33")
                event_send_all.wait()
            async with client:
                cl = queue_send_al.get()
                print(cl)
                if cl[0] == 1:
                    await client.send_file('me', str(cl[1]), use_cache=False, part_size_kb=512)
                    event_send_all.clear()
                elif cl[0] == 2:
                    print('1111')
                    event_send_all.clear()

    loop.create_task(clock())
    print('22')
    await client.run_until_disconnected()

def go():
    asyncio.run(send_message())

queue_send_al = Queue()
event_send_all = threading.Event()
queue_send_al.put([2, fr"11111111"])
event_send_all.set()
th3 = threading.Thread(target=go).start()
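No answer is recorded for this one, but the usual way to drive Telethon from other threads is to keep every Telethon call on the event loop that owns the client and submit work to it with asyncio.run_coroutine_threadsafe, instead of sharing a threading.Event between loops. A minimal sketch under that assumption (the credentials, session name, target chat and commands are placeholders):
import asyncio
import threading

from telethon import TelegramClient

api_id = 11        # placeholder credentials
api_hash = '11'
holder = {}        # filled by the Telethon thread once the client is ready

async def process_command(client, cmd):
    # Runs on the loop that owns the client.
    if cmd[0] == 1:
        await client.send_file('me', str(cmd[1]))
    elif cmd[0] == 2:
        await client.send_message('me', str(cmd[1]))

def telethon_thread():
    async def main():
        client = TelegramClient('session', api_id, api_hash)
        await client.start()
        holder['loop'] = asyncio.get_running_loop()
        holder['client'] = client
        await client.run_until_disconnected()
    asyncio.run(main())

threading.Thread(target=telethon_thread, daemon=True).start()

# Later, from any other thread (once holder is populated):
# fut = asyncio.run_coroutine_threadsafe(
#     process_command(holder['client'], (2, "hello")), holder['loop'])
# fut.result()   # optionally block until the message is sent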

Python RabbitMQ Concurrency

In Spring, setting the concurrency for a RabbitMQ consumer is easy:
container.setConcurrentConsumers(consumerSize);
container.setMaxConcurrentConsumers(consumerMaxSize);
Is this possible in Python?
My Python code looks like:
async def handle_message(loop):
    connection = await connect(SETTINGS.cloudamqp_url, loop=loop)
    channel = await connection.channel()
    queue = await channel.declare_queue(SETTINGS.request_queue, durable=True)
    await queue.consume(on_message, no_ack=True)
I solved my problem using threads. My code looks like:
import threading
from aio_pika import connect, IncomingMessage, Message
import json

class QueueWorker(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self.connection = None
        self.channel = None
        self.queue = None

    async def init(self, loop):
        self.connection = await connect(cloudamqp_url, loop=loop)
        self.channel = await self.connection.channel()
        await self.channel.set_qos(prefetch_count=1)
        self.queue = await self.channel.declare_queue(queue, durable=True)
        await self.queue.consume(self.callback, no_ack=False)

    async def callback(self, message: IncomingMessage):
        request = json.loads(message.body.decode("utf-8"))
        try:
            handle(request)
        except Exception as e:
            pass  # handleException...
        finally:
            await message.ack()
Consume the queue with concurrency:
async def listen_queue(loop):
    for _ in range(consumer_count):
        td = QueueWorker()
        await td.init(loop)
Note: Inspired by Consuming rabbitmq queue from inside python threads
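Worth noting: QueueWorker.run() is never called, so the threads themselves never start and every consumer ends up registered on the same asyncio loop anyway. If that is acceptable, roughly the same concurrency can be expressed with plain asyncio and no threading at all. A sketch under that assumption (cloudamqp_url, queue_name, CONSUMER_COUNT and handle are placeholders):
import asyncio
import json

from aio_pika import connect, IncomingMessage

CONSUMER_COUNT = 5  # rough equivalent of setConcurrentConsumers

async def start_consumer(loop):
    connection = await connect(cloudamqp_url, loop=loop)
    channel = await connection.channel()
    await channel.set_qos(prefetch_count=1)
    queue = await channel.declare_queue(queue_name, durable=True)

    async def callback(message: IncomingMessage):
        async with message.process():  # acks the message on success
            handle(json.loads(message.body.decode("utf-8")))

    await queue.consume(callback)

async def listen_queue(loop):
    # N independent consumers sharing one event loop.
    await asyncio.gather(*(start_consumer(loop) for _ in range(CONSUMER_COUNT)))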

Processing a queue with asyncio only fires after all items are placed in the queue (Python)

I am trying to write some Python code that takes data from a generator (currently a simple counting loop, but it will be sensor data at some point) and places it in a queue. Once it is in the queue, I want to pull the data off and send it over a TCP connection. This seemed like a great time to use asyncio, but I am doing something wrong.
Currently, the script processes all the numbers and does not return anything. Ideally I want to make sure the queue never empties and to send a set amount of data, say 5 numbers, each time. How can I achieve this?
import asyncio
import random

class responder():
    def __init__(self, parent=None):
        super().__init__()

    async def produce(self, queue, n):
        for x in range(n):
            # produce an item
            print('producing {}/{}'.format(x, n))
            # simulate i/o operation using sleep
            await asyncio.sleep(random.random())
            item = str(x)
            # put the item in the queue
            await queue.put(item)

    async def consume(self, queue):
        while True:
            # wait for an item from the producer
            item = await queue.get()
            # process the item
            print('consuming {}...'.format(item))
            # simulate i/o operation using sleep
            await asyncio.sleep(random.random())
            # Notify the queue that the item has been processed
            queue.task_done()

    async def run(self, n):
        queue = asyncio.Queue()
        # schedule the consumer
        self.consumer = asyncio.ensure_future(self.consume(queue))
        # run the producer and wait for completion
        await self.produce(queue, n)
        # wait until the consumer has processed all items
        await queue.join()
        # the consumer is still awaiting an item, cancel it
        self.consumer.cancel()

    async def handle_echo(self, reader, writer):
        data = await reader.read(100)
        message = data.decode()
        addr = writer.get_extra_info('peername')
        print("Received %r from %r" % (message, addr))
        if (message == 'START_RUN'):
            data = await self.run(10)
            print("Send: %i" % data)
            writer.write(data)
            await writer.drain()
        else:
            print("Send: %r" % message)
            writer.write(message)
            await writer.drain()
        print("Close the client socket")
        writer.close()

    def launch_server(self):
        self.loop = asyncio.get_event_loop()
        self.coro = asyncio.start_server(self.handle_echo, '127.0.0.1', 7780, loop=self.loop)
        self.server = self.loop.run_until_complete(self.coro)
        # Serve requests until Ctrl+C is pressed
        print('Serving on {}'.format(self.server.sockets[0].getsockname()))
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            pass
        finally:
            # Close the server
            self.server.close()
            self.loop.run_until_complete(self.server.wait_closed())
            self.loop.close()

def main():
    server = responder()
    server.launch_server()

if __name__ == '__main__':
    main()
The code generates the number stream, but it runs through the entire list before moving on, and I never get a value back.
My client code (which never receives anything back):
import asyncio

async def capture_stream(reader):
    while not reader.at_eof:
        data = await reader.read(100)
        print(f'{who} received {len(data)} bytes')

async def tcp_echo_client(message, loop):
    reader, writer = await asyncio.open_connection('127.0.0.1', 7780, loop=loop)
    print('Send: %r' % message)
    writer.write(message.encode())
    if (message == "START_RUN"):
        data = await reader.read(100)
        print('Received: %r' % data.decode())
    else:
        collect_data = asyncio.create_task(capture_stream)
        data = await collect_data
    print('Close the socket')
    writer.close()

message = 'START_RUN'
loop = asyncio.get_event_loop()
loop.run_until_complete(tcp_echo_client(message, loop))
loop.close()
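No answer is recorded here, but the symptom follows from handle_echo awaiting self.run(10) until the producer and consumer are completely finished, and run() returning None, so nothing is ever written to the socket. One way to stream results as they are produced is to hand the consumer the TCP writer and let it send each item immediately. A rough sketch of just the two methods that would change, under those assumptions:
    async def consume(self, queue, writer):
        # Send each item to the client as soon as it comes off the queue.
        while True:
            item = await queue.get()
            writer.write((item + '\n').encode())
            await writer.drain()
            queue.task_done()

    async def handle_echo(self, reader, writer):
        data = await reader.read(100)
        if data.decode() == 'START_RUN':
            queue = asyncio.Queue()
            consumer = asyncio.ensure_future(self.consume(queue, writer))
            await self.produce(queue, 10)   # producer unchanged from the question
            await queue.join()              # wait until every item has been sent
            consumer.cancel()
        print("Close the client socket")
        writer.close()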

Run code block in parallel while streaming data

I have a block of code streaming online data, but in the meantime I want another block to run from time to time for analysis.
import datetime
import json

import websocket

class steaming_price():
    def on_open(ws):
        print('opened connection')

    def on_close(ws):
        print('closed connection')

    def on_message(ws, message):
        global closes, in_position, current_time
        print('received message')
        json_message = json.loads(message)
        #pprint.pprint(json_message)
        candle = json_message['k']      # all kline data
        is_candle_closed = candle['x']  # if it's closed
        close = candle['c']             # the close price
        # print the close price
        if is_candle_closed:
            print("candle closed at {}".format(close))
            closes.append(float(close))
            current_time.append(datetime.datetime.now())
            print(f"Time:{datetime.datetime.now()} Close:{close}")

    ws = websocket.WebSocketApp(SOCKET, on_open=on_open, on_close=on_close, on_message=on_message)
    ws.run_forever()
The following should run every minute to do analysis:
from collections import deque

import talib

class strategy():
    def __init__(self):
        self.closes = deque(maxlen=500)

    def strategy(self, data):
        self.macd, self.macd_signal, self.macd_hist = talib.MACD(
            data, fastperiod=12, slowperiod=26, signalperiod=9)
        return self.macd, self.macd_signal, self.macd_hist
The first block of code takes in data every second.
I would like the second block of code to run in parallel every minute to do analysis, for example calculate a 20-bar moving average and issue a buy order.
I thought about async, but it only ran once (I couldn't get it to work either).
Any ideas? Much appreciated.
An easy solution would be to check, on each new message, how much time has passed since the last analysis and, if it is longer than 1 minute, run the analysis in the same function, as sketched below.
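A minimal sketch of that idea, keeping the question's websocket-client callback style (the 60-second interval, the analyzer instance and converting the global closes list to a NumPy array for talib.MACD are assumptions):
import time

import numpy as np

last_analysis = time.monotonic()
analyzer = strategy()  # the strategy class from the question

def on_message(ws, message):
    global last_analysis
    # ... parse the candle and append the close price to `closes`, as in the question ...
    if time.monotonic() - last_analysis >= 60:
        macd, macd_signal, macd_hist = analyzer.strategy(np.array(closes, dtype=float))
        print(f"latest MACD hist: {macd_hist[-1]}")
        last_analysis = time.monotonic()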
Another solution, which you mentioned, would be to use the asyncio-based websockets library:
server:
import asyncio
import json
from random import random

import websockets

async def hello(websocket, _):
    while True:
        value = random() * 50 + 50
        value = json.dumps({"v": value})
        await websocket.send(value)
        print(f"sent {value}")
        await asyncio.sleep(1)

start_server = websockets.serve(hello, "localhost", 8080)

asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
client:
import asyncio
import json
from collections import deque

import websockets

q = deque(maxlen=5)

async def calc_sum():
    while True:
        data = list(q)
        print(f"sum {sum(data)}")
        await asyncio.sleep(3.6)

async def hello():
    uri = "ws://localhost:8080"
    async with websockets.connect(uri) as websocket:
        while True:
            value = await websocket.recv()
            value = json.loads(value)
            print(f"recv {value}")
            q.append(value['v'])

event_loop = asyncio.get_event_loop()
event_loop.create_task(calc_sum())
event_loop.run_until_complete(hello())
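To map the client back onto the question, calc_sum would become a coroutine that wakes up once a minute and runs the MACD strategy on the collected closes. A hedged sketch (the strategy class is the one from the question; the deque, the 60-second interval and the NumPy conversion are assumptions):
import asyncio
from collections import deque

import numpy as np

analyzer = strategy()          # the strategy class from the question
closes = deque(maxlen=500)     # filled by the receiving coroutine, like `q` above

async def run_analysis():
    while True:
        await asyncio.sleep(60)          # once per minute
        if len(closes) >= 26:            # MACD's slow period needs enough bars
            macd, signal, hist = analyzer.strategy(np.array(closes, dtype=float))
            print(f"MACD hist: {hist[-1]}")

# scheduled the same way as calc_sum():
# event_loop.create_task(run_analysis())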

Reduce latency in Asyncio

I have a websocket server up and running fine using FastAPI.
However, when I use those "await" calls, I get some latency issues. At first I thought this had something to do with the internet connection, or perhaps my Linux server, but it appears that asyncio is waiting on other tasks.
Here is my code:
import asyncio
from typing import List

from fastapi import WebSocket
from pydantic import BaseModel

class UserClientWebSocket(BaseModel):
    id: str
    ws: WebSocket

    class Config:
        arbitrary_types_allowed = True

class ConnectionManager:
    def __init__(self):
        self.active_user_client_connections: List[UserClientWebSocket] = []
        self.collect_user_IDs = []

    async def connect_the_user_client(self, websocket: WebSocket, THE_USER_ID):
        await websocket.accept()
        await self.send_message_to_absolutely_everybody(f"User: {THE_USER_ID} connected to server!")
        print("User: {} ".format(THE_USER_ID) + " Connected")
        if THE_USER_ID not in self.collect_user_IDs:
            self.collect_user_IDs.append(THE_USER_ID)
        else:
            await self.send_message_to_absolutely_everybody(f"Somebody connected with the same ID as client: {THE_USER_ID}")
            await self.send_message_to_absolutely_everybody("but Vlori is a nice and kind guy, so he wil not get kicked :)")
            self.collect_user_IDs.append(THE_USER_ID)
        self.active_user_client_connections.append(UserClientWebSocket(ws=websocket, id=THE_USER_ID))
        await self.show_number_of_clients()

    async def function_one_send_message_to_absolutely_everybody(self, message: str):
        try:
            await asyncio.sleep(2)
            await asyncio.gather(*(conn.ws.send_text(message) for conn in self.active_webpage_client_connections))
            await asyncio.gather(*(conn.ws.send_text(message) for conn in self.active_user_client_connections))
        except:
            print("waiting")

    async def function_two_send_personal_message_to_user(self, message: str, websocket: WebSocket):
        try:
            await websocket.send_text(message)
        except:
            print("waiting for task..")

    ...
and further down is the endpoint to which the client connects:
@app.websocket("/ws/testchannel")
async def websocket_endpoint(websocket: WebSocket):
    await websocket.accept()
    try:
        while True:
            data = await websocket.receive_text()
            print_result = print("Received data: {} ".format(data))
            send_data_back_to_user = await websocket.send_text(f"you sent message: {data}")
    except WebSocketDisconnect as e:
        print("client left chat, error = ", e)
The code as it stands works perfectly and the performance is good. However, if I add an async call such as this under the "send_data_back_to_user" line:
await connection_manager.function_one_send_message_to_absolutely_everybody(data)
then there is huge latency. Why is that?
I am playing around and tried this:
@app.websocket("/ws/testchannel")
async def websocket_endpoint(websocket: WebSocket):
    await websocket.accept()
    try:
        while True:
            data = await websocket.receive_text()
            print_result = print("Received data: {} ".format(data))
            send_data_back_to_user = await websocket.send_text(f"you sent message: {data}")  # if I comment this line and the line underneath, the speed is extremely fast!
            the_asyncio_loop = asyncio.get_event_loop()
            print_data_on_terminal = asyncio.gather(print_result)
            return_data_back_to_user = asyncio.gather(send_data_back_to_user)
            broadcast_msg = asyncio.gather(connection_manager.function_one_send_message_to_absolutely_everybody(data))
            run_all_loops_together = asyncio.gather(print_data_on_terminal, return_data_back_to_user, broadcast_msg)
            results = the_asyncio_loop.run_until_complete(run_all_loops_together)
            print(results)
    except WebSocketDisconnect as e:
        print("client left chat, error = ", e)
but it gives me the error:
TypeError: An asyncio.Future, a coroutine or an awaitable is required
Could someone help me with this?
Thanks.
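No answer is recorded here, but two things stand out: function_one_send_message_to_absolutely_everybody starts with await asyncio.sleep(2), so every broadcast adds at least two seconds of delay, and the TypeError comes from passing already-awaited results (print_result and send_data_back_to_user, both None) to asyncio.gather; run_until_complete also cannot be used on the loop FastAPI is already running. A hedged sketch of scheduling the broadcast without blocking the receive loop (dropping the sleep is an assumption):
@app.websocket("/ws/testchannel")
async def websocket_endpoint(websocket: WebSocket):
    await websocket.accept()
    try:
        while True:
            data = await websocket.receive_text()
            await websocket.send_text(f"you sent message: {data}")
            # Fire-and-forget: the broadcast runs as its own task,
            # so the next receive_text() is not delayed by slow clients.
            asyncio.create_task(
                connection_manager.function_one_send_message_to_absolutely_everybody(data))
    except WebSocketDisconnect as e:
        print("client left chat, error = ", e)
Removing the await asyncio.sleep(2) inside the broadcast is what removes most of the latency; create_task only keeps the remaining send time off the hot path.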
