Dynamically add and remove sockets - Python

How would I add or remove sockets dynamically from the with/as statement? Or would I need a completely different approach, like asyncio.create_task?
import asyncio

import config  # local module holding the API credentials
from binance import AsyncClient, BinanceSocketManager

api_key = config.binance_key
api_secret = config.binance_secret


async def main():
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    # start any sockets here, i.e a trade socket
    ts1 = bm.symbol_ticker_socket('BNBBTC')
    ts2 = bm.symbol_ticker_socket('ETHBUSD')
    # then start receiving messages
    async with ts1 as tscm1, ts2 as tscm2:
        while True:
            res1 = await tscm1.recv()
            res2 = await tscm2.recv()
            print(res1)
            print(res2)


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())

Use AsyncExitStack to combine and handle multiple asynchronous context managers.
An auxiliary coroutine read_ticker is used to read data from a ticker socket connection.
import asyncio
from contextlib import AsyncExitStack

import config  # local module holding the API credentials
from binance import AsyncClient, BinanceSocketManager

api_key = config.binance_key
api_secret = config.binance_secret


async def read_ticker(ts_cm):
    """Read ticker data from a ticker socket connection."""
    while True:
        res = await ts_cm.recv()
        print(res)


async def main():
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)
    # start any sockets here, i.e a trade socket
    ts1 = bm.symbol_ticker_socket('BNBBTC')
    ts2 = bm.symbol_ticker_socket('ETHBUSD')
    async with AsyncExitStack() as stack:
        await asyncio.gather(*[read_ticker(await stack.enter_async_context(ts))
                               for ts in (ts1, ts2)])


if __name__ == "__main__":
    asyncio.run(main())
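On the second part of the question: if streams really have to be added and removed at runtime rather than fixed up front, asyncio.create_task is indeed an option. The sketch below is only an illustration of that idea, not part of the answer above; watch_symbol is a hypothetical helper built from the same BinanceSocketManager call used in the question, and cancelling its task exits the socket's async with block.

import asyncio
from binance import AsyncClient, BinanceSocketManager


async def watch_symbol(bm, symbol):
    # open one ticker socket and print its messages until the task is cancelled
    async with bm.symbol_ticker_socket(symbol) as stream:
        while True:
            print(await stream.recv())


async def main():
    client = await AsyncClient.create()
    bm = BinanceSocketManager(client)

    tasks = {}
    # dynamically add sockets
    tasks['BNBBTC'] = asyncio.create_task(watch_symbol(bm, 'BNBBTC'))
    tasks['ETHBUSD'] = asyncio.create_task(watch_symbol(bm, 'ETHBUSD'))

    await asyncio.sleep(30)

    # dynamically remove a socket: cancelling the task closes its context manager
    tasks.pop('ETHBUSD').cancel()

    await asyncio.sleep(30)
    await client.close_connection()


if __name__ == "__main__":
    asyncio.run(main())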

Related

Pandas Concat 500 CSV Files Within S3

I have around 500 CSV files in an S3 bucket.
Note: each file is around 1 GB, roughly 6 million lines.
What I am trying to do is concatenate all of those CSV files into a single file.
I'm trying to speed up the process, but I'm not sure what else I can do in this case.
Below is my code:
import trio
import boto3
import pandas as pd
from functools import partial

AWS_ID = 'Hidden'
AWS_SECRET = 'Hidden'
Bucket_Name = 'Hidden'

limiter = trio.CapacityLimiter(10)


async def read_object(bucket, object_csv, sender):
    async with limiter, sender:
        print(f'Reading {object_csv}')
        test = bucket.Object(object_csv)
        test = test.get()['Body']
        data = await trio.to_thread.run_sync(partial(pd.read_csv, test, header=None))
        await sender.send(data)


async def main():
    async with trio.open_nursery() as nurse:
        s3 = boto3.resource(
            service_name='s3',
            aws_access_key_id=AWS_ID,
            aws_secret_access_key=AWS_SECRET,
        )
        bucket = s3.Bucket(Bucket_Name)
        allfiles = [i.key for i in bucket.objects.all()]
        sender, receiver = trio.open_memory_channel(0)
        nurse.start_soon(rec, receiver)
        async with sender:
            for f in allfiles:
                nurse.start_soon(read_object, bucket, f, sender.clone())


async def rec(receiver):
    alldf = []
    async with receiver:
        async for df in receiver:
            alldf.append(df)
    final = pd.concat(alldf, ignore_index=True)
    print(final)


if __name__ == "__main__":
    try:
        trio.run(main)
    except KeyboardInterrupt:
        exit('Job Cancelled!')
The part which is taking time:
data = await trio.to_thread.run_sync(partial(pd.read_csv, test, header=None))
For a single file it takes about 2 minutes, and even though the read runs in a worker thread, the whole job still takes far too long.
Update: new code below:
limiter = trio.CapacityLimiter(10)


async def read_object(bucket, object_csv, sender):
    async with limiter, sender:
        print(f'Reading {object_csv}')
        test = bucket.Object(object_csv)
        test = test.get()['Body']
        data = await trio.to_thread.run_sync(test.read)
        await sender.send(data)
        print(f'Done Reading {object_csv}')


async def main():
    async with trio.open_nursery() as nurse:
        s3 = boto3.resource(
            service_name='s3',
            aws_access_key_id=AWS_ID,
            aws_secret_access_key=AWS_SECRET,
        )
        bucket = s3.Bucket(Bucket_Name)
        sender, receiver = trio.open_memory_channel(0)
        nurse.start_soon(rec, receiver)
        async with sender:
            for csv in bucket.objects.all():
                nurse.start_soon(read_object, bucket, csv.key, sender.clone())


async def rec(receiver):
    async with receiver, await trio.open_file('output.csv', 'wb') as f:
        count = 0
        async for df in receiver:
            count += 1
            await f.write(df)
            await f.write(b"\n")
            print(f'Collected {count}', flush=True, end='\r')


if __name__ == "__main__":
    try:
        trio.run(main)
    except KeyboardInterrupt:
        exit('Job Cancelled!')
Based on that code, is it possible to speed up the process any further?
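There is no accepted answer here, but one direction worth sketching (purely illustrative, not benchmarked): the updated code still pulls each ~1 GB object fully into memory before handing it to the writer. A variant that streams every object to a temporary file with boto3's download_fileobj keeps memory flat and lets the writer append finished parts as they arrive. The names download_object and concatenate below are made up for this sketch.

import os
import shutil
import tempfile

import boto3
import trio

AWS_ID = 'Hidden'
AWS_SECRET = 'Hidden'
Bucket_Name = 'Hidden'

limiter = trio.CapacityLimiter(10)


async def download_object(bucket, key, sender):
    """Download one object to a temporary file in a worker thread."""
    async with limiter, sender:
        fd, path = tempfile.mkstemp(suffix='.csv')
        os.close(fd)
        # download_fileobj streams to disk, so a worker never holds ~1 GB in RAM
        with open(path, 'wb') as tmp:
            await trio.to_thread.run_sync(bucket.download_fileobj, key, tmp)
        await sender.send(path)


async def concatenate(receiver, out_path='output.csv'):
    """Append each finished temp file to the output as it arrives."""
    with open(out_path, 'wb') as out:
        async with receiver:
            async for path in receiver:
                with open(path, 'rb') as part:
                    # blocking copy, pushed to a thread so the loop stays responsive
                    await trio.to_thread.run_sync(shutil.copyfileobj, part, out)
                os.remove(path)


async def main():
    s3 = boto3.resource('s3', aws_access_key_id=AWS_ID,
                        aws_secret_access_key=AWS_SECRET)
    bucket = s3.Bucket(Bucket_Name)
    async with trio.open_nursery() as nurse:
        sender, receiver = trio.open_memory_channel(0)
        nurse.start_soon(concatenate, receiver)
        async with sender:
            for obj in bucket.objects.all():
                nurse.start_soon(download_object, bucket, obj.key, sender.clone())


if __name__ == "__main__":
    trio.run(main)

Because the channel has zero capacity and each worker holds the limiter until its path is received, at most about ten temporary files exist at any time.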

How to send messages to telegram from the StreamListener handler (twitter)? tweepy & telethon

I manage to send only one message after launching the script; after that it hangs and no longer receives messages from Twitter.
If I remove the block of code I wrapped in "------------------------------", I receive all the tweets, but when I try to send them to Telegram it stops after the first one.
Initially I did this without separate threads, because I could not get the result I wanted.
Then I wrapped everything in separate threads, but the result is the same.
What am I doing wrong?
from telethon import TelegramClient, events, sync
from telethon.tl.types import InputChannel
import tweepy
import yaml
import sys
import coloredlogs, logging
import asyncio
import threading
import concurrent.futures
import time

start_twitter = threading.Event()

forwardinput_channel_entities = []
forwardoutput_channels = {}


class MyStreamListener(tweepy.StreamListener):

    def on_status(self, status):
        user_id = status.user.id
        if user_id in forwardoutput_channels:
            for output_channel in forwardoutput_channels[user_id]:
                message = status.text
                logging.info('-------------')
                logging.info(message)
                # ------------------------------
                try:
                    loop = asyncio.get_event_loop()
                except Exception as e:
                    loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(loop)
                    logging.error(e)
                    pass
                loop.run_until_complete(telegram_client.send_message(
                    output_channel['channel'], message))
                # ------------------------------


def twitter_thred():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    auth = tweepy.OAuthHandler(config['twitter_consumer_api'],
                               config['twitter_consumer_secret'])
    auth.set_access_token(config['twitter_user_api'],
                          config['twitter_user_secret'])

    global twitter_api
    twitter_api = tweepy.API(auth)

    myStreamListener = MyStreamListener()
    myStream = tweepy.Stream(auth=twitter_api.auth, listener=myStreamListener)

    start_twitter.wait()
    myStream.filter(follow=forwardinput_channel_entities,
                    is_async=True)


def telegram_thred():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    global telegram_client
    telegram_client = TelegramClient(config['session_name'],
                                     config['api_id'],
                                     config['api_hash'])
    telegram_client.start()

    for forwardto in config['forwardto_list_ids']:
        for twitter_user_id in forwardto['from']:
            forwardinput_channel_entities.append(str(twitter_user_id))
            channels = []
            for channel in telegram_client.iter_dialogs():
                if channel.entity.id in forwardto['to']:
                    channels.append({
                        'channel': InputChannel(
                            channel.entity.id, channel.entity.access_hash),
                    })
            forwardoutput_channels[twitter_user_id] = channels

    start_twitter.set()
    telegram_client.run_until_disconnected()


def start():
    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
        future = executor.submit(telegram_thred)
        future = executor.submit(twitter_thred)


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print(f'Usage: {sys.argv[0]} {{CONFIG_PATH}}')
        sys.exit(1)

    with open(sys.argv[1], 'rb') as f:
        global config
        config = yaml.safe_load(f)

    coloredlogs.install(
        fmt='%(asctime)s.%(msecs)03d %(message)s',
        datefmt='%H:%M:%S')

    start()
An example of a yml config to run the script:
# telegram
api_id: *****************
api_hash: '*****************'
session_name: 'test'

# twitter
twitter_consumer_api: '*****************'
twitter_consumer_secret: '*****************'
twitter_user_api: '*****************'
twitter_user_secret: '*****************'

forwardto_list_ids:
  - from:
      - 0000000000 # account twitter id
    to:
      - 0000000000 # telegram channel id
As noted, Tweepy doesn't support asyncio for streaming yet, so the stream blocks the event loop while it runs; is_async only uses a threaded approach.
For now, you should look into using Tweepy's async-streaming branch: https://github.com/tweepy/tweepy/pull/1491.
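If you keep the threaded layout above in the meantime, a pattern that usually avoids the hang is to stop creating and running a new event loop inside Tweepy's callback thread and instead hand the coroutine to the loop Telethon is already running. This is only a sketch against the script above: it assumes telegram_thred() stores its loop in a global telegram_loop (next to the existing global telegram_client), and that forwardoutput_channels is the dict built there.

import asyncio
import tweepy

# assumed globals from the script above:
#   telegram_client        - the running TelegramClient
#   telegram_loop          - the loop created in telegram_thred()
#                            (add "global telegram_loop" next to "global telegram_client")
#   forwardoutput_channels - twitter user id -> list of telegram channels


class MyStreamListener(tweepy.StreamListener):

    def on_status(self, status):
        # on_status runs in Tweepy's thread, so schedule the coroutine on the
        # Telethon loop that is already running in the telegram thread
        for output_channel in forwardoutput_channels.get(status.user.id, []):
            future = asyncio.run_coroutine_threadsafe(
                telegram_client.send_message(output_channel['channel'], status.text),
                telegram_loop)
            future.result(timeout=30)  # surface errors instead of silently hanging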

AttributeError: module 'select' has no attribute 'select' error ASYNCIO

I am executing the code below on a Windows PC. I read that, by default, Windows can only use 64 sockets in an asyncio loop. I don't know if this is the reason for the error.
import aiohttp
import asyncio
import time


async def download_file(url):
    print(f'started downloading {url}')
    connector = aiohttp.TCPConnector(limit=60)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            content = await resp.read()
            print(f'Finished download {url}')
            return content


async def write_file(n, content):
    filename = f'async_{n}.html'
    with open(filename, 'wb') as f:
        print(f'started writing {filename}')
        f.write(content)
        print(f'Finished writing {filename}')


async def scrape_task(n, url):
    content = await download_file(url)
    await write_file(n, content)


async def main():
    tasks = []
    for n, url in enumerate(open('urls.txt').readlines()):
        tasks.append(scrape_task(n, url))
    await asyncio.wait(tasks)


if __name__ == '__main__':
    t = time.perf_counter()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
    t2 = time.perf_counter() - t
    print(f'Total time taken: {t2:0.2f} seconds')
I made the changes below to limit the connections to 60:
connector = aiohttp.TCPConnector(limit=60)
async with aiohttp.ClientSession(connector=connector) as session:
I can't figure out where I am going wrong.
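No answer is recorded for this one, but since the question mentions the 64-socket limit of select() on Windows, here is a minimal sketch (not a diagnosis of the AttributeError) of capping the number of in-flight requests with an asyncio.Semaphore on top of the connector limit, while sharing one session for all downloads:

import asyncio
import aiohttp


async def scrape_task(sem, session, n, url):
    async with sem:                       # at most 60 requests in flight
        async with session.get(url) as resp:
            content = await resp.read()
    with open(f'async_{n}.html', 'wb') as f:
        f.write(content)


async def main():
    sem = asyncio.Semaphore(60)           # stay under Windows' select() limit
    urls = [line.strip() for line in open('urls.txt') if line.strip()]
    async with aiohttp.ClientSession() as session:
        await asyncio.gather(*(scrape_task(sem, session, n, url)
                               for n, url in enumerate(urls)))


if __name__ == '__main__':
    asyncio.run(main())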

Use websocket and I/O serial together in python?

I am struggling to get both my websocket script and my serial I/O script running together in one program.
Just some basic info before I continue:
I am using a Windows PC (I have no access to a Linux PC).
This is the reason why I am using the AIOSerial library instead of pyserial-asyncio.
I have no "super" experience with asyncio, so please be kind :)
Here is my "old" websocket script:
from aiohttp import web
import socketio
import aiohttp_cors
import asyncio
import random

# creates a new Async Socket IO Server
sio = socketio.AsyncServer()
# Creates
app = web.Application()
sio.attach(app)

server_is_responding = "Message from the server:"
the_response = "Hello there!"


async def index(request):
    with open('index.html') as f:
        print("Somebody entered the server from the browser!")
        return web.Response(text=f.read(), content_type='text/html')


@sio.on("android-device")
async def message(sid, data):
    print("message: ", data)
    # return send_message_to_client()


@sio.on('sendTextToServer')
async def message(sid, data):
    print("message: ", data)
    if data == "hei":
        await sio.emit("ServerMessage", {"hehe"})
    if data == "lol":
        await sio.emit("ServerMessage", {"Message from server:": "hehe, funny right?.."})
    else:
        await sio.emit("ServerMessage", {"Message from server:": "Hello There!"})


# We bind our aiohttp endpoint to our app router
cors = aiohttp_cors.setup(app)
app.router.add_get('/', index)

# We kick off our server
if __name__ == '__main__':
    web.run_app(app)
And here is my serial I/O script (which works and reads the data) that I am trying to combine with some of the websocket functions above:
import asyncio
import websockets
import socketio
import aiohttp_cors
import logging

from AIOExtensions.AIOSerial import (AIOSerial, AIOSerialClosedException,
                                     AIOSerialErrorException, AIOSerialNotOpenException)

logging.basicConfig(level=logging.DEBUG)

sio = socketio.AsyncServer()


async def hello(websocket, path):
    name = await websocket.recv()
    print(f"< {name}")

    greeting = f"Hello {name}!"

    await websocket.send(greeting)
    print(f"> {greeting}")


@sio.on("android-device")
async def message(sid, data):
    print("message: ", data)


async def read_IO_serial():
    try:
        async with AIOSerial('COM8', baudrate=115200, line_mode=True) as aios:
            await asyncio.sleep(100)
            try:
                while True:
                    # read with timeout
                    rcvd = await asyncio.wait_for(aios.read(), timeout=1.0)
                    # print the data received
                    print(f"data received: {rcvd}")
                    if rcvd == b'RF initialized\n':
                        print("CATCHED THIS LINE!")
            except asyncio.TimeoutError:
                print("reception timed out ;-(")
    except AIOSerialNotOpenException:
        print("Unable to open the port!")
        print()
        print("Have you specified the right port number? COM7? COM8?")
    # port fatal error
    except AIOSerialErrorException:
        print("Port error!")
    # port already closed
    except AIOSerialClosedException:
        print("Serial port is closed!")


start_server = websockets.serve(hello, "http://192.168.1.6", 8080)
# sio.attach(start_server)  # HOW CAN I ATTACH THIS SO IT CAN BE USED WITH THE SIO FUNCTIONS BELOW?

if start_server:
    print("Server started!")

asyncio.run(read_IO_serial())

asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
As you can see in my first simple websocket script, I could use "sio.attach(app)", which made it possible to listen to events from the client, so I need a way of replacing this "app" in my current script.
Can someone please help me with this?
I solved it using asyncio.gather(); this is how I did it:
from aiohttp import web
import socketio
import aiohttp_cors
import asyncio
import random

import asyncio as aio
import logging

import sys

# creates a new Async Socket IO Server
sio = socketio.AsyncServer()
# Creates
app = web.Application()
sio.attach(app)

server_is_responding = "Message from the server:"
the_response = "Hello there!"


async def index(request):
    with open('index.html') as f:
        print("Somebody entered the server from the browser!")
        return web.Response(text=f.read(), content_type='text/html')


@sio.event
async def join(sid, message):
    sio.enter_room(sid, message['room'])
    await sio.emit('my_response', {'data': 'Entered room: ' + message['room']}, room=sid)


@sio.on("android-device")
async def message(sid, data):
    print("message: ", data)


@sio.on("receiveMessageFromServer")
async def message(sid, data):
    print("message: ", data)
    # await asyncio.sleep(1 * random.random())
    return "OKKKK", 123


from AIOExtensions.AIOSerial import (AIOSerial, AIOSerialClosedException,
                                     AIOSerialErrorException, AIOSerialNotOpenException)

logging.basicConfig(level=logging.DEBUG)


async def read_IO_serial():
    try:
        async with AIOSerial('COM8', baudrate=115200, line_mode=True) as aios:
            # aios.sp.baudrate = 230400
            # aios.sp.baudrate = 115200

            # await aios.write(b"AT\r\n")

            # await aios.read()

            # await aios.close()

            await aio.sleep(100)

            try:
                while True:
                    # read with timeout
                    rcvd = await aio.wait_for(aios.read(), timeout=1.0)
                    # print the data received
                    print(f"data received: {rcvd}")

                    if rcvd == b'RF initialized\n':
                        print("CATCHED THIS LINE!")

            except aio.TimeoutError:
                print("reception timed out ;-(")

    except AIOSerialNotOpenException:
        print("Unable to open the port!")
        print()
        print("Have you specified the right port number? COM7? COM8?")
    # port fatal error
    except AIOSerialErrorException:
        print("Port error!")
    # port already closed
    except AIOSerialClosedException:
        print("Serial port is closed!")


async def on_startup(app):
    pass


cors = aiohttp_cors.setup(app)
app.router.add_get('/', index)

# We kick off our server
if __name__ == '__main__':
    loop = asyncio.get_event_loop()

    group2 = asyncio.gather(read_IO_serial())
    group1 = asyncio.gather(web.run_app(app))

    all_groups = asyncio.gather(group1, group2)

    results = loop.run_until_complete(all_groups)

    # loop.close()

    # print(results)
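A side note on the gather-based solution above: aiohttp also documents a way to run the server from inside an already running event loop (web.AppRunner plus web.TCPSite), which lets the serial reader and the web server live in a single asyncio.run() call. A minimal sketch, reusing the app and read_IO_serial defined in the script above:

import asyncio
from aiohttp import web

# "app" and "read_IO_serial" are the objects defined in the script above


async def run_server():
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, '0.0.0.0', 8080)
    await site.start()                # server is now accepting connections
    try:
        await read_IO_serial()        # runs alongside the web server in the same loop
    finally:
        await runner.cleanup()


if __name__ == '__main__':
    asyncio.run(run_server())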

Python asyncio / aiohttp error

I am writing a simple producer/consumer app to call multiple URLs asynchronously.
In the following code, if I set conn_count=1 and add 2 items to the queue, it works fine since only one consumer is created. But if I set conn_count=2 and add 4 items to the queue, only 3 requests are made; the other request fails with ClientConnectorError.
Can you please help me debug the reason for the failure with multiple consumers? Thank you.
I am using an echo server I created.
Server:
import os
import logging.config
import yaml
from aiohttp import web
import json


def start():
    setup_logging()
    app = web.Application()
    app.router.add_get('/', do_get)
    app.router.add_post('/', do_post)
    web.run_app(app)


async def do_get(request):
    return web.Response(text='hello')


async def do_post(request):
    data = await request.json()
    return web.Response(text=json.dumps(data))


def setup_logging(
        default_path='logging.yaml',
        default_level=logging.INFO,
        env_key='LOG_CFG'
):
    path = default_path
    value = os.getenv(env_key, None)
    if value:
        path = value
    if os.path.exists(path):
        with open(path, 'rt') as f:
            config = yaml.safe_load(f.read())
            logging.config.dictConfig(config)
    else:
        logging.basicConfig(level=default_level)


if __name__ == '__main__':
    start()
Client:
import asyncio
import collections
import json
import sys

import async_timeout
from aiohttp import ClientSession, TCPConnector

MAX_CONNECTIONS = 100
URL = 'http://localhost:8080'

InventoryAccount = collections.namedtuple("InventoryAccount", "op_co customer_id")


async def produce(queue, num_consumers):
    for i in range(num_consumers * 2):
        await queue.put(InventoryAccount(op_co=i, customer_id=i * 100))
    for j in range(num_consumers):
        await queue.put(None)


async def consumer(n, queue, session, responses):
    print('consumer {}: starting'.format(n))
    while True:
        try:
            account = await queue.get()
            if account is None:
                queue.task_done()
                break
            else:
                print(f"Consumer {n}, Updating cloud prices for account: opCo = {account.op_co!s}, customerId = {account.customer_id!s}")
                params = {'opCo': account.op_co, 'customerId': account.customer_id}
                headers = {'content-type': 'application/json'}
                with async_timeout.timeout(10):
                    print(f"Consumer {n}, session state " + str(session.closed))
                    async with session.post(URL,
                                            headers=headers,
                                            data=json.dumps(params)) as response:
                        assert response.status == 200
                        responses.append(await response.text())
                        queue.task_done()
        except:
            e = sys.exc_info()[0]
            print(f"Consumer {n}, Error updating cloud prices for account: opCo = {account.op_co!s}, customerId = {account.customer_id!s}. {e}")
            queue.task_done()
    print('consumer {}: ending'.format(n))


async def start(loop, session, num_consumers):
    queue = asyncio.Queue(maxsize=num_consumers)
    responses = []
    consumers = [asyncio.ensure_future(loop=loop, coro_or_future=consumer(i, queue, session, responses))
                 for i in range(num_consumers)]
    await produce(queue, num_consumers)
    await queue.join()
    for consumer_future in consumers:
        consumer_future.cancel()
    return responses


async def run(loop, conn_count):
    async with ClientSession(loop=loop, connector=TCPConnector(verify_ssl=False, limit=conn_count)) as session:
        result = await start(loop, session, conn_count)
        print("Result: " + str(result))


if __name__ == '__main__':
    conn_count = 2
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(run(loop, conn_count))
    finally:
        loop.close()
Reference:
https://pymotw.com/3/asyncio/synchronization.html
https://pawelmhm.github.io/asyncio/python/aiohttp/2016/04/22/asyncio-aiohttp.html
https://hackernoon.com/asyncio-for-the-working-python-developer-5c468e6e2e8e
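For reference, here is a stripped-down sketch of the same producer/consumer shape against the echo server, with the sentinel items dropped in favour of queue.join() plus cancellation (the pattern from the asyncio docs). It is not a diagnosis of the ClientConnectorError, just a smaller surface to reproduce it on; a plain tuple stands in for InventoryAccount.

import asyncio
from aiohttp import ClientSession, TCPConnector

URL = 'http://localhost:8080'


async def consumer(n, queue, session, responses):
    while True:
        op_co, customer_id = await queue.get()
        try:
            async with session.post(URL, json={'opCo': op_co,
                                               'customerId': customer_id}) as resp:
                responses.append(await resp.text())
        finally:
            queue.task_done()


async def main(conn_count=2):
    queue = asyncio.Queue()
    responses = []
    async with ClientSession(connector=TCPConnector(limit=conn_count)) as session:
        workers = [asyncio.ensure_future(consumer(i, queue, session, responses))
                   for i in range(conn_count)]
        for i in range(conn_count * 2):
            await queue.put((i, i * 100))
        await queue.join()            # wait until every item has been processed
        for w in workers:
            w.cancel()
        await asyncio.gather(*workers, return_exceptions=True)
    print("Result:", responses)


if __name__ == '__main__':
    asyncio.run(main())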
