Why isn't the value updated when publishing to a Redis channel? - python

I'm trying to use Redis in my FastAPI application and I'm struggling to update a value when publishing to a channel.
This is my Redis client setup:
import logging

import redis as _redis

logger = logging.getLogger(__name__)

REDIS_URL = "redis://redis:6379"


class RedisClient:
    __instance = None
    client: _redis.Redis

    def __new__(cls) -> "RedisClient":
        if cls.__instance is None:
            cls.__instance = object.__new__(cls)
            try:
                logger.info("Connecting to Redis")
                client = _redis.Redis.from_url(REDIS_URL)
                cls.client = client
            except Exception:
                logger.error("Unable to connect to Redis")
            logger.info("Connected to Redis")
        return cls.__instance

    def disconnect(self):
        logger.info("Closing Redis connection")
        self.client.connection_pool.disconnect()


redis = RedisClient().client
redis_pubsub = redis.pubsub()
This is the POST route I'm using to test updating the value:
@router.post("/", response_model=Any)
async def post(tracing_no: int) -> Any:
    data = {"tracing_no": tracing_no, "result": 200, "client_id": "10"}
    data = json.dumps(data)
    result = redis.publish(TestChannel.TestTopic.value, data)
    return redis.get(TestChannel.TestTopic.value)
This code always returns "{\"tracing_no\": 5, \"result\": 200, \"client_id\": \"10\"}", no matter what number I pass as tracing_no in the request. I think 5 was the first number I used when testing, and that value is in my dump.rdb file.
Any ideas why my tracing_no isn't updating?
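For reference, a minimal sketch of the distinction in redis-py between publishing to a channel and storing a value under a key (the "test-topic" key name is a placeholder): PUBLISH only pushes the message to clients currently subscribed to the channel and returns the number of receivers; it never stores anything, so GET keeps returning whatever was last written with SET, here presumably the old test value persisted in dump.rdb.
import json

import redis as _redis

r = _redis.Redis.from_url("redis://redis:6379")

data = json.dumps({"tracing_no": 7, "result": 200, "client_id": "10"})

# publish() broadcasts to current subscribers of the channel and returns the
# subscriber count; it does not write anything under the "test-topic" key.
receivers = r.publish("test-topic", data)

# get() only ever reads what was stored with set(); store the payload
# explicitly if the latest value should also be readable later.
r.set("test-topic", data)
latest = r.get("test-topic")
print(receivers, latest)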

Related

How to instantiate a class correctly using Dependency Injector in a Flask app?

In my Flask app, I'm using Dependency Injector, and here's what my app looks like. I have a service that uses S3 as a datastore, and I'm trying to instantiate my app with the service injected (which is itself injected with the S3 client). However, it doesn't look like the S3 client is instantiated correctly, or I'm doing something wrong.
containers.py
class Container(containers.DeclarativeContainer):
    wiring_config = containers.WiringConfiguration(modules=[".routes", ".scheduler"])
    config = providers.Configuration()

    s3_config = dict()
    s3_config["path"], s3_config["filters"] = config.get("s3_bucket"), [("member_status_nm", "=", "ACTIVE")]

    s3_repository = providers.Singleton(S3Repository, s3_config)

    my_service = providers.Factory(
        MyService, config, S3Repository
    )
Here's my S3Repository:
import logging
import sys
import time

import some_library as lib


class S3Repository:
    def __init__(self, s3_config):
        self.path, self.columns, self.filters = \
            s3_config.get("path", ""), s3_config.get("columns", []), s3_config.get("filters", [])

    def fetch(self):
        # execute fetch
        result = lib.some_fetch_method(self.path, self.columns, self.filters)
        return result
and MyService:
# all relevant imports here


class MyService:
    def __init__(self, config: dict, s3_repository: S3Repository) -> None:
        logging.info("HealthSignalService(): initializing")
        self.config = config["app"]["health_signal_service"]
        # prepare s3_repository for the service
        self.s3_repository = s3_repository
        self.s3_repository.columns, self.s3_repository.filters, self.s3_repository.path = \
            ["x", "y"], ["x1", "y1"], "file_path"

    def fetch_data(self) -> None:
        try:
            summary_result = self.s3_repository.fetch()
        except (FileNotFoundError, IOError) as e:
            print("failure")
        return summary_result

    def get_data(self, memberId):
        sth = self.fetch_data()
        return sth.get(memberId)
and finally tying it together in my routes.py:
@inject
@auth.login_required
def get_signals(
    my_service: MyService = Provide[
        Container.my_service
    ],
):
    content = request.json
    member_id = content["memberId"]
    result = my_service.get_signals(member_id)
    return jsonify(result)
When I hit my API endpoint I see this error:
summary_result = self.s3_repository.fetch()
TypeError: fetch() missing 1 required positional argument: 'self'
How do I correctly initialize my S3 client while using dependency injection?
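A likely fix, assuming the dependency-injector providers shown above (imports of S3Repository and MyService from your own modules are assumed): the Factory is currently given the S3Repository class itself, so MyService ends up holding the bare class and fetch() is called unbound, which is exactly the missing 'self' error. Passing the s3_repository provider instead makes the container build and inject an instance:
from dependency_injector import containers, providers


class Container(containers.DeclarativeContainer):
    wiring_config = containers.WiringConfiguration(modules=[".routes", ".scheduler"])
    config = providers.Configuration()

    s3_config = dict()
    s3_config["path"], s3_config["filters"] = config.get("s3_bucket"), [("member_status_nm", "=", "ACTIVE")]

    s3_repository = providers.Singleton(S3Repository, s3_config)

    # Pass the s3_repository provider, not the S3Repository class, so the
    # container constructs an instance and injects it into MyService.
    my_service = providers.Factory(
        MyService,
        config,
        s3_repository,
    )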

Tornado gives error Cannot write() after finish()

I am using the Tornado chat demo example from here: https://github.com/tornadoweb/tornado/tree/master/demos/chat
and altering it only very slightly.
The change is just a small class called Connections and a bit in MessageNewHandler(). All I am doing is saving a reference to self and trying to write(message) to a previous client.
But when execution reaches this line, conns.conns[0].write(message), I get this error message:
[E 220107 23:18:38 web:1789] Uncaught exception POST /a/message/new (::1)
HTTPServerRequest(protocol='http', host='localhost:8888', method='POST', uri='/a/message/new', version='HTTP/1.1', remote_ip='::1')
Traceback (most recent call last):
  File "/home/joe/dev/tornado/lib/python3.8/site-packages/tornado/web.py", line 1702, in _execute
    result = method(*self.path_args, **self.path_kwargs)
  File "server.py", line 89, in post
    MessageNewHandler.clients[0].write(message)
  File "/home/joe/dev/tornado/lib/python3.8/site-packages/tornado/web.py", line 833, in write
    raise RuntimeError("Cannot write() after finish()")
RuntimeError: Cannot write() after finish()
[E 220107 23:18:38 web:2239] 500 POST /a/message/new (::1) 5.98ms
Here is the code:
import asyncio
import tornado.escape
import tornado.ioloop
import tornado.locks
import tornado.web
import os.path
import uuid

from tornado.options import define, options, parse_command_line

define("port", default=8888, help="run on the given port", type=int)
define("debug", default=True, help="run in debug mode")


class Connections(object):
    def __init__(self):
        self.conns = []

    def add_connection(self, conn_self):
        self.conns.append(conn_self)

    def conns(self):
        return self.conns


conns = Connections()


class MessageBuffer(object):
    def __init__(self):
        # cond is notified whenever the message cache is updated
        self.cond = tornado.locks.Condition()
        self.cache = []
        self.cache_size = 200

    def get_messages_since(self, cursor):
        """Returns a list of messages newer than the given cursor.

        ``cursor`` should be the ``id`` of the last message received.
        """
        results = []
        for msg in reversed(self.cache):
            if msg["id"] == cursor:
                break
            results.append(msg)
        results.reverse()
        return results

    def add_message(self, message):
        self.cache.append(message)
        if len(self.cache) > self.cache_size:
            self.cache = self.cache[-self.cache_size:]
        self.cond.notify_all()


# Making this a non-singleton is left as an exercise for the reader.
global_message_buffer = MessageBuffer()


class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.render("index.html", messages=global_message_buffer.cache)


class MessageNewHandler(tornado.web.RequestHandler):
    """Post a new message to the chat room."""

    def post(self):
        message = {"id": str(uuid.uuid4()), "body": self.get_argument("body")}
        # render_string() returns a byte string, which is not supported
        # in json, so we must convert it to a character string.
        message["html"] = tornado.escape.to_unicode(
            self.render_string("message.html", message=message)
        )
        conns.add_connection(self)
        if len(conns.conns) > 2:
            conns.conns[0].write(message)
        self.finish()


class MessageUpdatesHandler(tornado.web.RequestHandler):
    """Long-polling request for new messages.

    Waits until new messages are available before returning anything.
    """

    async def post(self):
        cursor = self.get_argument("cursor", None)
        messages = global_message_buffer.get_messages_since(cursor)
        while not messages:
            # Save the Future returned here so we can cancel it in
            # on_connection_close.
            self.wait_future = global_message_buffer.cond.wait()
            try:
                await self.wait_future
            except asyncio.CancelledError:
                return
            messages = global_message_buffer.get_messages_since(cursor)
        if self.request.connection.stream.closed():
            return
        self.write(dict(messages=messages))

    def on_connection_close(self):
        self.wait_future.cancel()


def main():
    parse_command_line()
    app = tornado.web.Application(
        [
            (r"/", MainHandler),
            (r"/a/message/new", MessageNewHandler),
            (r"/a/message/updates", MessageUpdatesHandler),
        ],
        cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        xsrf_cookies=True,
        debug=options.debug,
    )
    app.listen(options.port)
    tornado.ioloop.IOLoop.current().start()


if __name__ == "__main__":
    main()
You're writing to an already closed connection; that's why you're seeing the error.
If you want to write to a previously connected client, you have to keep that connection open.
However, tracking regular HTTP connections with conns.add_connection(self) doesn't make much sense.
You should consider using websockets if you want to keep previous connections open and track them.
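For illustration, a minimal websocket-based sketch of that idea; the handler name and route are placeholders, not something from the chat demo:
import tornado.web
import tornado.websocket


class ChatSocketHandler(tornado.websocket.WebSocketHandler):
    # Class-level registry of currently open websocket connections.
    clients = []

    def open(self):
        ChatSocketHandler.clients.append(self)

    def on_message(self, message):
        # Push the incoming message to the earliest still-connected client.
        if len(ChatSocketHandler.clients) > 1:
            ChatSocketHandler.clients[0].write_message(message)

    def on_close(self):
        ChatSocketHandler.clients.remove(self)


# Registered alongside the existing handlers, e.g.
# (r"/a/message/socket", ChatSocketHandler)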
Update: here's how you can keep a connection open. If I understand correctly, you want to send a message from the current client to the previous client.
1. Using tornado.locks.Condition():
import tornado.locks


class MessageNewHandler(tornado.web.RequestHandler):
    """Post a new message to the chat room."""

    clients = []
    condition = tornado.locks.Condition()

    async def post(self):
        message = {"id": str(uuid.uuid4()), "body": self.get_argument("body")}
        # render_string() returns a byte string, which is not supported
        # in json, so we must convert it to a character string.
        message["html"] = tornado.escape.to_unicode(
            self.render_string("message.html", message=message)
        )

        MessageNewHandler.clients.append(self)

        if len(MessageNewHandler.clients) < 2:
            # less than 2 clients
            # wait until notified
            await MessageNewHandler.condition.wait()
        else:
            # at least 2 clients
            # write to previous client's response
            MessageNewHandler.clients[0].finish(message)
            # notify the first waiting client
            # so it can send the response
            MessageNewHandler.condition.notify()
            # Note: since you've finished previous client's response
            # you should also remove it from clients list
            # since you can't use that connection again
2. Using tornado.concurrent.Future():
import tornado.concurrent


class MessageNewHandler(tornado.web.RequestHandler):
    """Post a new message to the chat room."""

    waiters = []

    async def post(self):
        message = {"id": str(uuid.uuid4()), "body": self.get_argument("body")}
        # render_string() returns a byte string, which is not supported
        # in json, so we must convert it to a character string.
        message["html"] = tornado.escape.to_unicode(
            self.render_string("message.html", message=message)
        )

        future = tornado.concurrent.Future()  # create a future

        # instead of saving the reference to the client,
        # save the future
        MessageNewHandler.waiters.append(future)

        if len(MessageNewHandler.waiters) < 2:
            # less than 2 clients
            # wait for next client's message
            msg_from_next_client = await future
            # the future will resolve when the next client
            # sets a result on it
            # then python will execute the following code
            self.finish(msg_from_next_client)
            # Note: since you've finished this connection
            # you should remove this future from the waiters list
            # since you can't reuse this connection again
        else:
            # at least 2 clients
            # set the current client's message
            # as a result on previous client's future
            previous_client_future = MessageNewHandler.waiters[0]
            if not previous_client_future.done():
                # only set a result if you haven't set it already
                # otherwise you'll get an error
                previous_client_future.set_result(message)
3. A more practical example using tornado.concurrent.Future():
import tornado.concurrent


class Queue:
    """We'll keep the future related code in this class.

    This will allow us to present a cleaner, more intuitive usage api.
    """

    waiters = []

    @classmethod
    def get_message_from_next_client(cls):
        future = tornado.concurrent.Future()
        cls.waiters.append(future)
        return future

    @classmethod
    def send_message_to_prev_client(cls, message):
        previous_client_future = cls.waiters[0]
        if not previous_client_future.done():
            previous_client_future.set_result(message)


class MessageNewHandler(tornado.web.RequestHandler):
    """Post a new message to the chat room."""

    async def post(self):
        message = {"id": str(uuid.uuid4()), "body": self.get_argument("body")}
        message["html"] = tornado.escape.to_unicode(
            self.render_string("message.html", message=message)
        )

        if len(Queue.waiters) < 2:
            msg_from_next_client = await Queue.get_message_from_next_client()
            self.finish(msg_from_next_client)
        else:
            Queue.send_message_to_prev_client(message)
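The notes in options 1 and 2 both stress that a finished waiter must not be reused. A small variation on the option 3 Queue (not part of the original answer) that does that bookkeeping by popping the oldest waiter:
import tornado.concurrent


class Queue:
    waiters = []

    @classmethod
    def get_message_from_next_client(cls):
        future = tornado.concurrent.Future()
        cls.waiters.append(future)
        return future

    @classmethod
    def send_message_to_prev_client(cls, message):
        # pop() removes the oldest waiter so a finished connection is never
        # handed another message later.
        previous_client_future = cls.waiters.pop(0)
        if not previous_client_future.done():
            previous_client_future.set_result(message)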
I had a look at the RequestHandler code at https://github.com/tornadoweb/tornado/blob/master/tornado/web.py
I got rid of the Connections class and changed MessageNewHandler to this:
class MessageNewHandler(tornado.web.RequestHandler):
    """Post a new message to the chat room."""

    clients = []

    def post(self):
        self._auto_finish = False
        message = {"id": str(uuid.uuid4()), "body": self.get_argument("body")}
        # render_string() returns a byte string, which is not supported
        # in json, so we must convert it to a character string.
        message["html"] = tornado.escape.to_unicode(
            self.render_string("message.html", message=message)
        )
        MessageNewHandler.clients.append(self)
        if len(MessageNewHandler.clients) > 1:
            MessageNewHandler.clients[0].finish(message)
So the two key changes that made it work were self._auto_finish = False
and MessageNewHandler.clients[0].finish(message).
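One caveat with this version: nothing ever removes a finished handler from MessageNewHandler.clients, so the list keeps growing and index 0 eventually points at a connection that has already been finished. A minimal sketch of that cleanup, as a small variation on the snippet above (same setup as the demo assumed):
import uuid

import tornado.escape
import tornado.web


class MessageNewHandler(tornado.web.RequestHandler):
    clients = []

    def post(self):
        self._auto_finish = False
        message = {"id": str(uuid.uuid4()), "body": self.get_argument("body")}
        message["html"] = tornado.escape.to_unicode(
            self.render_string("message.html", message=message)
        )
        MessageNewHandler.clients.append(self)
        if len(MessageNewHandler.clients) > 1:
            # Finish the oldest waiting client and drop it from the registry
            # so an already-finished connection is never written to again.
            previous = MessageNewHandler.clients.pop(0)
            previous.finish(message)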

convert synchronous websocket callbacks to async in python using asyncio

I need your assistance to convert synchronous websocket callbacks to async using asyncio in Python 3.6.
Some brokers in India provide a websocket for tick data to their users, who can then use this tick data further. One such broker's sample code (from their git repo) is pasted here:
from smartapi import SmartWebSocket

# feed_token=092017047
FEED_TOKEN = "YOUR_FEED_TOKEN"
CLIENT_CODE = "YOUR_CLIENT_CODE"

# token="mcx_fo|224395"
token = "EXCHANGE|TOKEN_SYMBOL"  # SAMPLE: nse_cm|2885&nse_cm|1594&nse_cm|11536&nse_cm|3045
# token="mcx_fo|226745&mcx_fo|220822&mcx_fo|227182&mcx_fo|221599"
task = "mw"  # mw|sfi|dp

ss = SmartWebSocket(FEED_TOKEN, CLIENT_CODE)


def on_message(ws, message):
    print("Ticks: {}".format(message))


def on_open(ws):
    print("on open")
    ss.subscribe(task, token)


def on_error(ws, error):
    print(error)


def on_close(ws):
    print("Close")


# Assign the callbacks.
ss._on_open = on_open
ss._on_message = on_message
ss._on_error = on_error
ss._on_close = on_close

ss.connect()
So what I think the broker is doing is providing messages/ticks as events that trigger a callback function for every received message/tick, in a synchronous way.
I want to alter the above code so that it works in an async manner. I have tried writing it the way Binance provides it, such as async with websocketName('currencyPair') as ts:, but this doesn't seem to work with my broker.
I'd appreciate it if you could share some ideas/code/insights around this.
Thank you for your time.
Edit 1:
Thank you for your response, Dirn.
Yes, I've tried this:
from smartapi import SmartConnect
from smartapi import SmartWebSocket
import logging
import asyncio

CLIENT_CODE = Code_provided_by_broker
PASSWORD = My_Password

obj = SmartConnect(api_key=My_api_key)
data = obj.generateSession(CLIENT_CODE, PASSWORD)
refreshToken = data['data']['refreshToken']
print("refreshToken", refreshToken)
feedToken = obj.getfeedToken()
print("feedToken", feedToken)
FEED_TOKEN = feedToken

token = "nse_cm|3499"
# token="nse_fo|53595"
task = 'mw'


async def on_message(ws, message):
    print("Ticks: {}".format(message))


async def chk_msg():
    # ss = SmartWebSocket(FEED_TOKEN, CLIENT_CODE)
    # ss.subscribe(task,token)
    # ss._on_message = on_message
    # ss.connect()
    # print("Ticks: {}".format(message))
    async with SmartWebSocket(FEED_TOKEN, CLIENT_CODE) as ss:  # throws an error: AttributeError: __aexit__
        ss.subscribe(task, token)
        ss.connect()
        while True:
            # not sure how to await the non-async method _on_message; also, there is no use of
            # asyncio in the imported SmartWebSocket and SmartConnect modules
            ss._on_message = on_message


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(chk_msg())
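As an aside on the AttributeError: __aexit__ noted in the comment above: async with only works on objects that implement the async context-manager protocol, which SmartWebSocket does not. A tiny illustration of what that protocol looks like (DummySocket is a made-up stand-in, not the broker's class):
import asyncio


class DummySocket:
    """Toy stand-in showing what `async with` requires:
    the object must implement __aenter__ and __aexit__."""

    async def __aenter__(self):
        print("connected")
        return self

    async def __aexit__(self, exc_type, exc, tb):
        print("closed")


async def demo():
    async with DummySocket() as ws:
        print("using", ws)


asyncio.get_event_loop().run_until_complete(demo())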
Here is smartConnect.py:
from six.moves.urllib.parse import urljoin
import sys
import csv
import json
import dateutil.parser
import hashlib
import logging
import datetime
import smartapi.smartExceptions as ex
import requests
from requests import get
import re, uuid
import socket
import platform
from smartapi.version import __version__, __title__

log = logging.getLogger(__name__)

# user_sys=platform.system()
# print("the system",user_sys)


class SmartConnect(object):
    # _rootUrl = "https://openapisuat.angelbroking.com"
    _rootUrl = "https://apiconnect.angelbroking.com"  # prod endpoint
    # _login_url = "https://smartapi.angelbroking.com/login"
    _login_url = "https://smartapi.angelbroking.com/publisher-login"  # prod endpoint
    _default_timeout = 7  # In seconds

    _routes = {
        "api.login": "/rest/auth/angelbroking/user/v1/loginByPassword",
        "api.logout": "/rest/secure/angelbroking/user/v1/logout",
        "api.token": "/rest/auth/angelbroking/jwt/v1/generateTokens",
        "api.refresh": "/rest/auth/angelbroking/jwt/v1/generateTokens",
        "api.user.profile": "/rest/secure/angelbroking/user/v1/getProfile",
        "api.order.place": "/rest/secure/angelbroking/order/v1/placeOrder",
        "api.order.modify": "/rest/secure/angelbroking/order/v1/modifyOrder",
        "api.order.cancel": "/rest/secure/angelbroking/order/v1/cancelOrder",
        "api.order.book": "/rest/secure/angelbroking/order/v1/getOrderBook",
        "api.ltp.data": "/rest/secure/angelbroking/order/v1/getLtpData",
        "api.trade.book": "/rest/secure/angelbroking/order/v1/getTradeBook",
        "api.rms.limit": "/rest/secure/angelbroking/user/v1/getRMS",
        "api.holding": "/rest/secure/angelbroking/portfolio/v1/getHolding",
        "api.position": "/rest/secure/angelbroking/order/v1/getPosition",
        "api.convert.position": "/rest/secure/angelbroking/order/v1/convertPosition",
        "api.gtt.create": "/gtt-service/rest/secure/angelbroking/gtt/v1/createRule",
        "api.gtt.modify": "/gtt-service/rest/secure/angelbroking/gtt/v1/modifyRule",
        "api.gtt.cancel": "/gtt-service/rest/secure/angelbroking/gtt/v1/cancelRule",
        "api.gtt.details": "/rest/secure/angelbroking/gtt/v1/ruleDetails",
        "api.gtt.list": "/rest/secure/angelbroking/gtt/v1/ruleList",
        "api.candle.data": "/rest/secure/angelbroking/historical/v1/getCandleData"
    }

    try:
        clientPublicIp = " " + get('https://api.ipify.org').text
        if " " in clientPublicIp:
            clientPublicIp = clientPublicIp.replace(" ", "")
        hostname = socket.gethostname()
        clientLocalIp = socket.gethostbyname(hostname)
    except Exception as e:
        print("Exception while retriving IP Address,using local host IP address", e)
    finally:
        clientPublicIp = "106.193.147.98"
        clientLocalIp = "127.0.0.1"
    clientMacAddress = ':'.join(re.findall('..', '%012x' % uuid.getnode()))
    accept = "application/json"
    userType = "USER"
    sourceID = "WEB"

    def __init__(self, api_key=None, access_token=None, refresh_token=None, feed_token=None, userId=None, root=None, debug=False, timeout=None, proxies=None, pool=None, disable_ssl=False, accept=None, userType=None, sourceID=None, Authorization=None, clientPublicIP=None, clientMacAddress=None, clientLocalIP=None, privateKey=None):
        self.debug = debug
        self.api_key = api_key
        self.session_expiry_hook = None
        self.disable_ssl = disable_ssl
        self.access_token = access_token
        self.refresh_token = refresh_token
        self.feed_token = feed_token
        self.userId = userId
        self.proxies = proxies if proxies else {}
        self.root = root or self._rootUrl
        self.timeout = timeout or self._default_timeout
        self.Authorization = None
        self.clientLocalIP = self.clientLocalIp
        self.clientPublicIP = self.clientPublicIp
        self.clientMacAddress = self.clientMacAddress
        self.privateKey = api_key
        self.accept = self.accept
        self.userType = self.userType
        self.sourceID = self.sourceID

        if pool:
            self.reqsession = requests.Session()
            reqadapter = requests.adapters.HTTPAdapter(**pool)
            self.reqsession.mount("https://", reqadapter)
            print("in pool")
        else:
            self.reqsession = requests

        # disable requests SSL warning
        requests.packages.urllib3.disable_warnings()

    def requestHeaders(self):
        return {
            "Content-type": self.accept,
            "X-ClientLocalIP": self.clientLocalIp,
            "X-ClientPublicIP": self.clientPublicIp,
            "X-MACAddress": self.clientMacAddress,
            "Accept": self.accept,
            "X-PrivateKey": self.privateKey,
            "X-UserType": self.userType,
            "X-SourceID": self.sourceID
        }

    def setSessionExpiryHook(self, method):
        if not callable(method):
            raise TypeError("Invalid input type. Only functions are accepted.")
        self.session_expiry_hook = method

    def getUserId():
        return userId

    def setUserId(self, id):
        self.userId = id

    def setAccessToken(self, access_token):
        self.access_token = access_token

    def setRefreshToken(self, refresh_token):
        self.refresh_token = refresh_token

    def setFeedToken(self, feedToken):
        self.feed_token = feedToken

    def getfeedToken(self):
        return self.feed_token

    def login_url(self):
        """Get the remote login url to which a user should be redirected to initiate the login flow."""
        return "%s?api_key=%s" % (self._login_url, self.api_key)

    def _request(self, route, method, parameters=None):
        """Make an HTTP request."""
        params = parameters.copy() if parameters else {}
        uri = self._routes[route].format(**params)
        url = urljoin(self.root, uri)

        # Custom headers
        headers = self.requestHeaders()

        if self.access_token:
            # set authorization header
            auth_header = self.access_token
            headers["Authorization"] = "Bearer {}".format(auth_header)

        if self.debug:
            log.debug("Request: {method} {url} {params} {headers}".format(method=method, url=url, params=params, headers=headers))

        try:
            r = requests.request(method,
                                 url,
                                 data=json.dumps(params) if method in ["POST", "PUT"] else None,
                                 params=json.dumps(params) if method in ["GET", "DELETE"] else None,
                                 headers=headers,
                                 verify=not self.disable_ssl,
                                 allow_redirects=True,
                                 timeout=self.timeout,
                                 proxies=self.proxies)
        except Exception as e:
            raise e

        if self.debug:
            log.debug("Response: {code} {content}".format(code=r.status_code, content=r.content))

        # Validate the content type.
        if "json" in headers["Content-type"]:
            try:
                data = json.loads(r.content.decode("utf8"))
            except ValueError:
                raise ex.DataException("Couldn't parse the JSON response received from the server: {content}".format(
                    content=r.content))

            # api error
            if data.get("error_type"):
                # Call session hook if its registered and TokenException is raised
                if self.session_expiry_hook and r.status_code == 403 and data["error_type"] == "TokenException":
                    self.session_expiry_hook()

                # native errors
                exp = getattr(ex, data["error_type"], ex.GeneralException)
                raise exp(data["message"], code=r.status_code)

            return data
        elif "csv" in headers["Content-type"]:
            return r.content
        else:
            raise ex.DataException("Unknown Content-type ({content_type}) with response: ({content})".format(
                content_type=headers["Content-type"],
                content=r.content))

    def _deleteRequest(self, route, params=None):
        """Alias for sending a DELETE request."""
        return self._request(route, "DELETE", params)

    def _putRequest(self, route, params=None):
        """Alias for sending a PUT request."""
        return self._request(route, "PUT", params)

    def _postRequest(self, route, params=None):
        """Alias for sending a POST request."""
        return self._request(route, "POST", params)

    def _getRequest(self, route, params=None):
        """Alias for sending a GET request."""
        return self._request(route, "GET", params)

    def generateSession(self, clientCode, password):
        params = {"clientcode": clientCode, "password": password}
        loginResultObject = self._postRequest("api.login", params)

        if loginResultObject['status'] == True:
            jwtToken = loginResultObject['data']['jwtToken']
            self.setAccessToken(jwtToken)
            refreshToken = loginResultObject['data']['refreshToken']
            feedToken = loginResultObject['data']['feedToken']
            self.setRefreshToken(refreshToken)
            self.setFeedToken(feedToken)
            user = self.getProfile(refreshToken)

            id = user['data']['clientcode']
            # id='D88311'
            self.setUserId(id)
            user['data']['jwtToken'] = "Bearer " + jwtToken
            user['data']['refreshToken'] = refreshToken

            return user
        else:
            return loginResultObject

    def terminateSession(self, clientCode):
        logoutResponseObject = self._postRequest("api.logout", {"clientcode": clientCode})
        return logoutResponseObject

    def generateToken(self, refresh_token):
        response = self._postRequest('api.token', {"refreshToken": refresh_token})
        jwtToken = response['data']['jwtToken']
        feedToken = response['data']['feedToken']
        self.setFeedToken(feedToken)
        self.setAccessToken(jwtToken)
        return response

    def renewAccessToken(self):
        response = self._postRequest('api.refresh', {
            "jwtToken": self.access_token,
            "refreshToken": self.refresh_token,
        })

        tokenSet = {}
        if "jwtToken" in response:
            tokenSet['jwtToken'] = response['data']['jwtToken']
        tokenSet['clientcode'] = self.userId
        tokenSet['refreshToken'] = response['data']["refreshToken"]
        return tokenSet

    def getProfile(self, refreshToken):
        user = self._getRequest("api.user.profile", {"refreshToken": refreshToken})
        return user

    def placeOrder(self, orderparams):
        params = orderparams
        for k in list(params.keys()):
            if params[k] is None:
                del(params[k])
        orderResponse = self._postRequest("api.order.place", params)['data']['orderid']
        return orderResponse

    def modifyOrder(self, orderparams):
        params = orderparams
        for k in list(params.keys()):
            if params[k] is None:
                del(params[k])
        orderResponse = self._postRequest("api.order.modify", params)
        return orderResponse

    def cancelOrder(self, order_id, variety):
        orderResponse = self._postRequest("api.order.cancel", {"variety": variety, "orderid": order_id})
        return orderResponse

    def ltpData(self, exchange, tradingsymbol, symboltoken):
        params = {
            "exchange": exchange,
            "tradingsymbol": tradingsymbol,
            "symboltoken": symboltoken
        }
        ltpDataResponse = self._postRequest("api.ltp.data", params)
        return ltpDataResponse

    def orderBook(self):
        orderBookResponse = self._getRequest("api.order.book")
        return orderBookResponse

    def tradeBook(self):
        tradeBookResponse = self._getRequest("api.trade.book")
        return tradeBookResponse

    def rmsLimit(self):
        rmsLimitResponse = self._getRequest("api.rms.limit")
        return rmsLimitResponse

    def position(self):
        positionResponse = self._getRequest("api.position")
        return positionResponse

    def holding(self):
        holdingResponse = self._getRequest("api.holding")
        return holdingResponse

    def convertPosition(self, positionParams):
        params = positionParams
        for k in list(params.keys()):
            if params[k] is None:
                del(params[k])
        convertPositionResponse = self._postRequest("api.convert.position", params)
        return convertPositionResponse

    def gttCreateRule(self, createRuleParams):
        params = createRuleParams
        for k in list(params.keys()):
            if params[k] is None:
                del(params[k])
        createGttRuleResponse = self._postRequest("api.gtt.create", params)
        # print(createGttRuleResponse)
        return createGttRuleResponse['data']['id']

    def gttModifyRule(self, modifyRuleParams):
        params = modifyRuleParams
        for k in list(params.keys()):
            if params[k] is None:
                del(params[k])
        modifyGttRuleResponse = self._postRequest("api.gtt.modify", params)
        # print(modifyGttRuleResponse)
        return modifyGttRuleResponse['data']['id']

    def gttCancelRule(self, gttCancelParams):
        params = gttCancelParams
        for k in list(params.keys()):
            if params[k] is None:
                del(params[k])
        # print(params)
        cancelGttRuleResponse = self._postRequest("api.gtt.cancel", params)
        # print(cancelGttRuleResponse)
        return cancelGttRuleResponse

    def gttDetails(self, id):
        params = {
            "id": id
        }
        gttDetailsResponse = self._postRequest("api.gtt.details", params)
        return gttDetailsResponse

    def gttLists(self, status, page, count):
        if type(status) == list:
            params = {
                "status": status,
                "page": page,
                "count": count
            }
            gttListResponse = self._postRequest("api.gtt.list", params)
            # print(gttListResponse)
            return gttListResponse
        else:
            message = "The status param is entered as " + str(type(status)) + ". Please enter status param as a list i.e., status=['CANCELLED']"
            return message

    def getCandleData(self, historicDataParams):
        params = historicDataParams
        for k in list(params.keys()):
            if params[k] is None:
                del(params[k])
        getCandleDataResponse = self._postRequest("api.candle.data", historicDataParams)
        return getCandleDataResponse

    def _user_agent(self):
        return (__title__ + "-python/").capitalize() + __version__
And here is smartApiWebsocket.py:
import websocket
import six
import base64
import zlib
import datetime
import time
import json
import threading
import ssl


class SmartWebSocket(object):
    ROOT_URI = 'wss://wsfeeds.angelbroking.com/NestHtml5Mobile/socket/stream'
    HB_INTERVAL = 30
    HB_THREAD_FLAG = False
    WS_RECONNECT_FLAG = False
    feed_token = None
    client_code = None
    ws = None
    task_dict = {}

    def __init__(self, FEED_TOKEN, CLIENT_CODE):
        self.root = self.ROOT_URI
        self.feed_token = FEED_TOKEN
        self.client_code = CLIENT_CODE
        if self.client_code == None or self.feed_token == None:
            return "client_code or feed_token or task is missing"

    def _subscribe_on_open(self):
        request = {"task": "cn", "channel": "NONLM", "token": self.feed_token, "user": self.client_code,
                   "acctid": self.client_code}
        print(request)
        self.ws.send(
            six.b(json.dumps(request))
        )
        thread = threading.Thread(target=self.run, args=())
        thread.daemon = True
        thread.start()

    def run(self):
        while True:
            # More statements comes here
            if self.HB_THREAD_FLAG:
                break
            print(datetime.datetime.now().__str__() + ' : Start task in the background')
            self.heartBeat()
            time.sleep(self.HB_INTERVAL)

    def subscribe(self, task, token):
        # print(self.task_dict)
        self.task_dict.update([(task, token), ])
        # print(self.task_dict)
        if task in ("mw", "sfi", "dp"):
            strwatchlistscrips = token  # dynamic call
            try:
                request = {"task": task, "channel": strwatchlistscrips, "token": self.feed_token,
                           "user": self.client_code, "acctid": self.client_code}
                self.ws.send(
                    six.b(json.dumps(request))
                )
                return True
            except Exception as e:
                self._close(reason="Error while request sending: {}".format(str(e)))
                raise
        else:
            print("The task entered is invalid, Please enter correct task(mw,sfi,dp) ")

    def resubscribe(self):
        for task, marketwatch in self.task_dict.items():
            print(task, '->', marketwatch)
            try:
                request = {"task": task, "channel": marketwatch, "token": self.feed_token,
                           "user": self.client_code, "acctid": self.client_code}
                self.ws.send(
                    six.b(json.dumps(request))
                )
                return True
            except Exception as e:
                self._close(reason="Error while request sending: {}".format(str(e)))
                raise

    def heartBeat(self):
        try:
            request = {"task": "hb", "channel": "", "token": self.feed_token, "user": self.client_code,
                       "acctid": self.client_code}
            print(request)
            self.ws.send(
                six.b(json.dumps(request))
            )
        except:
            print("HeartBeat Sending Failed")
        # time.sleep(60)

    def _parse_text_message(self, message):
        """Parse text message."""
        data = base64.b64decode(message)
        try:
            data = bytes((zlib.decompress(data)).decode("utf-8"), 'utf-8')
            data = json.loads(data.decode('utf8').replace("'", '"'))
            data = json.loads(json.dumps(data, indent=4, sort_keys=True))
        except ValueError:
            return

        # return data
        if data:
            self._on_message(self.ws, data)

    def connect(self):
        # websocket.enableTrace(True)
        self.ws = websocket.WebSocketApp(self.ROOT_URI,
                                         on_message=self.__on_message,
                                         on_close=self.__on_close,
                                         on_open=self.__on_open,
                                         on_error=self.__on_error)
        self.ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})

    def __on_message(self, ws, message):
        self._parse_text_message(message)
        # print(msg)

    def __on_open(self, ws):
        print("__on_open################")
        self.HB_THREAD_FLAG = False
        self._subscribe_on_open()
        if self.WS_RECONNECT_FLAG:
            self.WS_RECONNECT_FLAG = False
            self.resubscribe()
        else:
            self._on_open(ws)

    def __on_close(self, ws):
        self.HB_THREAD_FLAG = True
        print("__on_close################")
        self._on_close(ws)

    def __on_error(self, ws, error):
        if ("timed" in str(error)) or ("Connection is already closed" in str(error)) or ("Connection to remote host was lost" in str(error)):
            self.WS_RECONNECT_FLAG = True
            self.HB_THREAD_FLAG = True
            if ws is not None:
                ws.close()
                ws.on_message = None
                ws.on_open = None
                ws.close = None
                # print (' deleting ws')
                del ws
            self.connect()
        else:
            print('Error info: %s' % (error))
            self._on_error(ws, error)

    def _on_message(self, ws, message):
        pass

    def _on_open(self, ws):
        pass

    def _on_close(self, ws):
        pass

    def _on_error(self, ws, error):
        pass
The provider has also imported the websocket module in the smartApiWebsocket.py file above, and has provided a custom webSocket.py as well (linked from their repo).
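For the actual question (bridging the callback-style SmartWebSocket into asyncio), one common pattern is to leave the blocking client on its own thread and push each tick into an asyncio.Queue via call_soon_threadsafe. The sketch below is an assumption-laden illustration, not broker-tested; it reuses the FEED_TOKEN/CLIENT_CODE/token/task values from the snippets above:
import asyncio
import threading

from smartapi import SmartWebSocket

FEED_TOKEN = "YOUR_FEED_TOKEN"
CLIENT_CODE = "YOUR_CLIENT_CODE"
token = "nse_cm|3499"
task = "mw"


async def main():
    loop = asyncio.get_event_loop()
    ticks = asyncio.Queue()

    ss = SmartWebSocket(FEED_TOKEN, CLIENT_CODE)

    # These callbacks run on the websocket thread, so hand the data to the
    # event loop with call_soon_threadsafe instead of touching the queue directly.
    def on_message(ws, message):
        loop.call_soon_threadsafe(ticks.put_nowait, message)

    def on_open(ws):
        ss.subscribe(task, token)

    ss._on_open = on_open
    ss._on_message = on_message

    # connect() blocks in run_forever(), so keep it off the event loop thread.
    threading.Thread(target=ss.connect, daemon=True).start()

    # Consume ticks asynchronously on the event loop.
    while True:
        message = await ticks.get()
        print("Ticks: {}".format(message))


if __name__ == "__main__":
    asyncio.get_event_loop().run_until_complete(main())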

Unable to access Python instance variables inside Celery Tasks

I want to have a MongoDB operation in a Celery task, but I'm facing some issues accessing the instance variables.
Here's the code:
app = CeleryConnector().app


class CeleryTasks:
    def __init__(self, port, docID, dbName, collName, data):
        self.port = port
        self.docID = docID
        self.dbName = dbName
        self.collName = collName
        self.data = data
        self.client = MongoClient(host='localhost', port=port, username='admin', password='password')
        self.db = self.client[dbName]
        print("CeleryTasks:init")

    @app.task
    def createDoc(dbName, collName, data):
        print("CeleryTasks:CreateDoc")
        if 'refs' not in data:
            return
        # Here is the error: I don't know how to access the client variable here.
        # db = client[dbName]
        print(data['refs'])
        for id in data['refs']:
            doc = db[collName].find_one({'_id': id})
            if doc is None:
                insertedID = db[collName].insert_one({
                    "_id": id
                })
                print(insertedID)

    @app.task(bind=True)
    def createDoc(self, dbName, collName, data):
        print("CeleryTasks:CreateDoc")
        if 'refs' not in data:
            return
        print(data['refs'])
        for id in data['refs']:
            doc = self.db[collName].find_one({'_id': id})
            if doc is None:
                insertedID = self.db[collName].insert_one({
                    "_id": id
                })
                print(insertedID)
Since we cannot pass non-JSON-serializable objects to a task, passing db or client is not an option.
Problem with the first function: I don't know how to access client inside it. I tried a few things but failed.
Problem with the second function: it gives an error:
doc = self.db[collName].find_one({'_id': id})
AttributeError: 'createDoc' object has no attribute 'db'
and so on.
How can I make this work, or how do I access instance variables inside Celery tasks?
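One thing worth noting (based on how Celery works, not stated in the original post): with bind=True, self is the bound Task instance, not a CeleryTasks object, so self.db will never exist there. A common pattern instead is to give each worker process its own module-level (or lazily created) MongoClient that the task function looks up. A minimal sketch reusing the connection details above; CeleryConnector is assumed to be the existing app factory from the snippet:
from pymongo import MongoClient

app = CeleryConnector().app

_client = None


def get_client(port):
    # Create one MongoClient per worker process and reuse it across tasks.
    global _client
    if _client is None:
        _client = MongoClient(host='localhost', port=port,
                              username='admin', password='password')
    return _client


@app.task
def createDoc(port, dbName, collName, data):
    if 'refs' not in data:
        return
    db = get_client(port)[dbName]
    for id in data['refs']:
        if db[collName].find_one({'_id': id}) is None:
            insertedID = db[collName].insert_one({"_id": id})
            print(insertedID)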

Tornado testing async requests

I need some advice on testing a Tornado app. For now I'm just playing with the demo chat application, but it looks like a real-life problem.
In the handler I have:
class MessageUpdatesHandler(BaseHandler):
    @tornado.web.authenticated
    @tornado.web.asynchronous
    def post(self):
        cursor = self.get_argument("cursor", None)
        global_message_buffer.wait_for_messages(self.on_new_messages,
                                                cursor=cursor)

    def on_new_messages(self, messages):
        # Closed client connection
        if self.request.connection.stream.closed():
            return
        self.finish(dict(messages=messages))


class MessageBuffer(object):
    def __init__(self):
        ....

    def wait_for_messages(self, callback, cursor=None):
        if cursor:
            new_count = 0
            for msg in reversed(self.cache):
                if msg["id"] == cursor:
                    break
                new_count += 1
            if new_count:
                callback(self.cache[-new_count:])
                return
        self.waiters.add(callback)

    def cancel_wait(self, callback):
        .....

    def new_messages(self, messages):
        logging.info("Sending new message to %r listeners", len(self.waiters))
        for callback in self.waiters:
            try:
                callback(messages)
            except:
                logging.error("Error in waiter callback", exc_info=True)
        self.waiters = set()
        self.cache.extend(messages)
        if len(self.cache) > self.cache_size:
            self.cache = self.cache[-self.cache_size:]
As I mentioned, the full source code is in the Tornado demos.
In my test I have:
@wsgi_safe
class MessageUpdatesHandlerTest(LoginedUserHanldersTest):
    Handler = MessageUpdatesHandler

    def test_add_message(self):
        from chatdemo import global_message_buffer
        kwargs = dict(
            method="POST",
            body='',
        )
        future = self.http_client.fetch(self.get_url('/'), callback=self.stop, **kwargs)
        message = {
            "id": '123',
            "from": "first_name",
            "body": "hello",
            "html": "html"
        }
        global_message_buffer.new_messages([message])
        response = self.wait()
        self.assertEqual(response.code, 200)
        self.mox.VerifyAll()
What happens:
1. It creates a future object.
2. It sends the hello message; at this moment no waiter is registered in MessageBuffer, so the callback is not called.
3. self.wait() starts the IOLoop and performs the POST fetch, and only then does the waiter become registered in MessageBuffer.
4. The callback is never called and my response remains empty, so everything fails with AssertionError: Async operation timed out after 5 seconds.
What I want it to do:
1. On POST, register itself as a waiter.
2. Receive some messages.
3. Return a 200 response to me.
Thank you for your help
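For what it's worth, a rough sketch of how the ordering can be fixed, assuming a plain tornado.testing.AsyncHTTPTestCase (the get_app() body and the route are assumptions, and xsrf/auth checks are assumed to be disabled for the test): deliver new_messages() from inside the IOLoop, after the fetch has had time to register its waiter, instead of before wait() starts.
import tornado.testing

import chatdemo
from chatdemo import global_message_buffer


class MessageUpdatesHandlerTest(tornado.testing.AsyncHTTPTestCase):
    def get_app(self):
        # Assumption: the chat demo exposes an Application to test against.
        return chatdemo.Application()

    def test_add_message(self):
        message = {
            "id": "123",
            "from": "first_name",
            "body": "hello",
            "html": "html",
        }

        # Start the long-polling POST first; it registers its waiter once the
        # IOLoop actually runs inside self.wait().
        self.http_client.fetch(
            self.get_url("/a/message/updates"),
            callback=self.stop,
            method="POST",
            body="",
        )

        # Deliver the message from inside the IOLoop, slightly delayed so the
        # request above has a chance to register its waiter first (a crude but
        # simple timing-based approach).
        self.io_loop.add_timeout(
            self.io_loop.time() + 0.2,
            lambda: global_message_buffer.new_messages([message]),
        )

        response = self.wait()
        self.assertEqual(response.code, 200)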
