How do you send a message to a Django Consumer from a custom manage.py command?
from django.core.management.base import BaseCommand, CommandError
from channels import Channel

class Command(BaseCommand):
    help = 'Sends a message to a Django channel from the thing'

    def add_arguments(self, parser):
        parser.add_argument('json_object', nargs='+', type=str)

    def handle(self, *args, **options):
        self.stdout.write("TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        print(Channel("test").channel_layer)
        Channel("test").send({'op': options['json_object'][0]})
This is my consumer:
class MyConsumer(WebsocketConsumer):

    @classmethod
    def channel_names(cls):
        return {"test"}

    def connection_groups(self):
        return ["test"]

    def dispatch(self, message, **kwargs):
        return self.get_handler(message, **kwargs)(message, **kwargs)

    def get_handler(self, message, **kwargs):
        channel_routing = [
            consumers.MyConsumer.as_route(path=r"^/test/"),
            route("test.receive", consumers.chat_join),
        ]
        for _filter, value in kwargs.items():
            filter_mapping = getattr(self, _filter + '_mapping', None)
            if not filter_mapping:
                continue
            consumer = getattr(self, filter_mapping.get(value), None)
            if consumer:
                return consumer
        raise ValueError('Message')

    def connect(self, message):
        self.message.reply_channel.send({"accept": True})

    def receive(self, text=None, bytes=None):
        print(text)

    def disconnect(self, message):
        pass
When I try to run the command, however, I get this message:
2017-03-08 03:45:33,839 - ERROR - worker - Could not find match for message on test! Check your routing.
In case it is pertinent, here is my routing
channel_routing = [
    consumers.MyConsumer.as_route(path=r"^/test/"),
]
In short, add the path to the content you'd like to send:
Channel("test").send({
'op':options['json_object'][0],
'path': '/test/',
})
And that's it!
I ran into the same problem, and I found out that it is because I was using the as_route method of the generic consumer to generate the route_class, which always has path as its filter.
If we use route instead, we do not have to provide the path argument, which is why the code in the docs (https://channels.readthedocs.io/en/stable/getting-started.html#models) works.
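For comparison, here is a minimal route-based routing sketch (assuming Channels 1.x and a plain consumer function chat_join in consumers.py, as in the question's snippets):

from channels.routing import route
from . import consumers

channel_routing = [
    # route() filters on the channel name alone, so the message content
    # does not need a 'path' key
    route("test", consumers.chat_join),
]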
Using Django, I need to poll updates from a GraphQL subscription and be able to turn the updates on and off.
My implementation uses the websockets package, wrapped in an async function, to poll updates from a local container providing a GraphQL subscription, and stores them in the Django database.
I need to find a way to control this polling feature with an on/off GraphQL mutation that starts or stops the readings and database updates.
I've tried using Celery by starting a Celery task from the Django apps.py ready() method, but I think it became overkill, and I ended up with multiple tasks that were too hard to manage.
I've also thought about using a database record to keep the status and running the asynchronous polling code in a management command, but continuously reading the feed status from the database without any hook seems like a bad idea.
My last attempt was to trigger a GraphQL mutation on my own service, using a management command at the start of my docker-compose fleet, to start the readings, but the asyncio event loop ends up locking the main thread for some reason.
Here's my current implementation using asyncio:
""" Feed listener class, responsible for connecting to the feed and processing temperature updates """
class FeedListener:
__instance = None
read: bool = False
task: asyncio.Task = None
def __new__(cls,*args, **kwargs):
""" Singleton implementation """
if FeedListener.__instance is None :
FeedListener.__instance = super(FeedListener, cls).__new__(cls, *args, **kwargs)
return FeedListener.__instance
def start_feed_readings(self) -> None:
""" Starts the feed listener if it is not already running """
self.read = True
if not self.task:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
tasks = [
asyncio.ensure_future(FeedListener.capture_data()),
]
loop.run_until_complete(asyncio.wait(tasks))
loop.close()
def stop_feed_readings(self) -> None:
""" Stops the feed listener if it is running """
self.read = False
self.task = None
#staticmethod
async def capture_data():
""" Connects to the feed, processes temperature updates and stores them in the database """
uri: str = f"ws://{settings.TEMPERATURE_FEED['HOST']}:{settings.TEMPERATURE_FEED['PORT']}/graphql"
start: dict = {
"type": "start",
"payload": {"query": "subscription { temperature }"}
}
async with websockets.connect(uri, subprotocols=["graphql-ws"]) as websocket:
print("Connected to feed")
await websocket.send(json.dumps(start))
while True:
data = json.loads(await websocket.recv())
print(data)
sync_to_async(TemperatureRecord.objects.create)(value=data["payload"]["data"]["temperature"])
It is started by the following management command:
""" This management command is used to control the reading status of the feed.
It is used to start and stop the feed readings through a graphql mutation to the main app."""
class Command(BaseCommand):
help = 'Controls the reading status of the feed'
def add_arguments(self, parser):
parser.add_argument('status', nargs='+', type=str)
def handle(self, *args, **options):
status: str = options['status'][0]
if (status != "on" and status != "off"):
raise CommandError("Invalid status")
query = """
mutation {
toggleFeed(input: {status: "%s"}) {
status
}
}
""" % status
url = "http://app:8000/graphql"
response = requests.post(url, json={'query': query})
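With this in place, the command is invoked as python manage.py <command_name> on, where the command name comes from the file name under management/commands/ (not shown in the post).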
GraphQL mutation code:
import graphene

# ToggleFeedInputType is defined elsewhere in the project's schema


class ToggleFeedMutation(graphene.Mutation):
    """
    Mutation to toggle the feed on and off
    """
    class Arguments:
        input = ToggleFeedInputType(required=True)

    status = graphene.String()

    def mutate(self, info, input):
        """ Toggles the feed status on and off. Throws an exception if the status is invalid """
        if input.status != "on" and input.status != "off":
            raise Exception("Invalid status")
        # an explicit if/else is safer than the `x and f() or g()` idiom,
        # which also calls g() whenever f() returns a falsy value
        if input.status == "on":
            FeedListener().start_feed_readings()
        else:
            FeedListener().stop_feed_readings()
How would you proceed to achieve this in an elegant way?
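One possible direction (a sketch under the post's own names, with placeholder polling logic rather than a drop-in fix): give the polling coroutine its own event loop inside a daemon thread, so that start_feed_readings() returns immediately and stop_feed_readings() only has to flip the flag the loop checks.

import asyncio
import threading


class FeedListener:
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance.read = False
            cls._instance._thread = None
        return cls._instance

    def start_feed_readings(self) -> None:
        if self._thread and self._thread.is_alive():
            return
        self.read = True
        # The daemon thread owns its own event loop, so neither the
        # mutation handler nor the main thread is ever blocked.
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()

    def stop_feed_readings(self) -> None:
        self.read = False  # capture_data() polls this flag and exits

    def _run(self) -> None:
        asyncio.run(self.capture_data())

    async def capture_data(self) -> None:
        while self.read:
            # placeholder for the websocket recv + database write
            # from the original capture_data()
            await asyncio.sleep(1)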
I am currently working with a Rails server which is supposed to run Python scripts that act as Kafka consumers/producers.
The server must run the script, then receive the processed data from the consumer and render it on the site.
I am able to run a script, but I cannot find a solution for connecting the consumer: the consumer either runs non-stop accepting messages, or runs in a while loop. I tried to run the consumer first from Ruby; this starts the consumer, but it never returns, as it just keeps listening, so the other script could never be run.
So the flow of a message should ideally be something like this: email from the logged-in user to the Kafka producer -> MQ -> Kafka consumer generates data and writes it to the db -> producer queries data from the database -> MQ -> consumer accepts the data and renders it to the site.
The ideal scenario would be one script, let's call it a manager, that does all the work and only accepts data and returns it. That also did not work, because the one script also runs the consumer and listens for the producer, so it is never run to completion.
So here is my code:
from kafka import KafkaProducer
from faker import Faker
import json
import time


class producer1():
    '''
    fr_instance = Faker()

    def get_new_user():
        return {"email_address": fr_instance.email(), "first_name": fr_instance.first_name(),
                "lastname": fr_instance.last_name(), "occupation": fr_instance.job()}
    '''

    def json_serializer(self, data):
        return json.dumps(data).encode("utf-8")

    def send(self, email):
        print(email)
        producer = KafkaProducer(bootstrap_servers='localhost:9092',
                                 value_serializer=self.json_serializer)
        registred_user = {"email": email}
        future = producer.send("NewUserTopic", registred_user)
        print(registred_user)
        result = future.get(timeout=10)


p = producer1()

if __name__ == '__main__':
    email = "testmail#aaaaaaaa.com"
    p.send(email)
Then the first consumer:
from kafka import KafkaConsumer
import json
import random
from sqlalchemy.orm import sessionmaker
import dbservice
import time


class consumer1():

    def email(self):
        consumer = KafkaConsumer('NewUserTopic',
                                 bootstrap_servers='localhost:9092',
                                 auto_offset_reset='latest', enable_auto_commit=False)
        for msg in consumer:
            msg_out = json.loads(msg.value)
            for value in msg_out.values():
                # return print(msg_out)
                return value

    # generate dummy address, eth
    def gen_ETHw(self):
        numbers = str(random.randint(11111, 99999))
        wallet_num = str("Ox" + numbers)
        return wallet_num

    # generate dummy address, btc
    def gen_BTCw(self):
        numbers = str(random.randint(11111, 99999))
        wallet_num = str("Ox" + numbers)
        return wallet_num

    def commit_db(self, email, ETHw, BTCw):
        Session = sessionmaker(bind=dbservice.engine)
        s = Session()
        input = dbservice.walletdb(email, ETHw, BTCw)
        time.sleep(2)
        s.add(input)
        s.commit()


if __name__ == '__main__':
    while True:
        c = consumer1()
        c.commit_db(c.email(), c.gen_ETHw(), c.gen_BTCw())
The query producer:
import dbservice
from sqlalchemy.orm import sessionmaker
from kafka import KafkaProducer
import json


class query_prod():

    def __init__(self, email) -> None:
        self.email = email

    def json_serializer(data):
        return json.dumps(data).encode("utf-8")

    # class-level attributes, shared by all instances
    producer = KafkaProducer(bootstrap_servers='localhost:9092',
                             value_serializer=json_serializer)
    Session = sessionmaker(bind=dbservice.engine)
    s = Session()

    def query_address(self, email):
        Session = sessionmaker(bind=dbservice.engine)
        s = Session()
        for s in s.query(dbservice.walletdb).filter_by(email=email):
            return {"email": s.email, "ETH_w": s.ETH_w, "BTC_w": s.BTC_w}

    def send(self, email):
        data_to_send = self.query_address(email)
        future = self.producer.send("QueryAdressToServer", data_to_send)
        print(data_to_send)
        result = future.get(timeout=10)


if __name__ == '__main__':
    email = "testmail#aaaaaaaa.com"
    query_prod = query_prod(email)
    query_prod.send(email)
And the consumer whose data should be returned to the site:
from kafka import KafkaConsumer
import json
import time


class consume_for_web():
    string = ""

    def consumer(self):
        consumer = KafkaConsumer('QueryAdressToServer',
                                 bootstrap_servers='localhost:9092',
                                 auto_offset_reset='latest', enable_auto_commit=False)
        print('starting consumer')
        for msg in consumer:
            data = '{}'.format(json.loads(msg.value))
            self.string = self.string + data
            # returning here exits the loop after the first message
            return print(data)

    def read_str(self):
        return print(self.string)


if __name__ == '__main__':
    while True:
        c = consume_for_web()
        c.consumer()
        ##print("reading")
        #c.read_str()
And finally my Rails pages controller:
class PagesController < ApplicationController
  before_action :require_login

  def home
  end

  def about
  end

  def genw
    our_input = current_user.email
    puts our_input
    #consumer_result = `python3 /Users/samuelrybar/python_projects/Kafka_demo1/kafka-prod-coms/consumer2.py`
  end

  def mywa
  end

  def save
  end
end
Thanks for your time and help, I really appreciate it. :))
Not sure why you are trying to run Python scripts from a running Rails server. It sounds like a very bad idea to me. You can run Kafka consumers/producers from Ruby and Ruby on Rails directly. I suggest you investigate Karafka; we've been using it successfully at work.
I'm trying to build my FIX application with QuickFIX, but when starting it, it first sends a logout message before logging in, and raises Session Not Found.
import quickfix


class Application(quickfix.Application):
    def __init__(self, session, logger):
        super(Application, self).__init__()
        self.session = session
        self.logger = logger

    def onCreate(self, sessionID):
        self.logger.info("Created session {}.".format(sessionID))
        return

    def onLogon(self, sessionID):
        self.logger.info("Logon session {}.".format(sessionID))
        return

    def onLogout(self, sessionID):
        self.logger.info("Logout session {}.".format(sessionID))
        return

    def toAdmin(self, message, sessionID):
        msgType = quickfix.MsgType()
        message.getHeader().getField(msgType)
        if msgType.getValue() == quickfix.MsgType_Logon:
            self.logger.info('LOGON SessionID {}'.format(sessionID))
        elif msgType.getValue() == quickfix.MsgType_Logout:
            self.logger.info('LOGOUT SessionID {}'.format(sessionID))
        self.logger.info('to Admin session {} send {}'.format(sessionID, self.messageToString(message)))
        self.session.sendToTarget(message)
        return

    def toApp(self, message, sessionID):
        self.logger.info('to App: {}'.format(message))
        self.session.sendToTarget(message)
        return

    def fromApp(self, message, sessionID):
        self.logger.info('from App: {}'.format(message))
        return


logger = create_logger(config)
settings = quickfix.SessionSettings(client_config)
application = Application(quickfix.Session, logger)
storeFactory = quickfix.FileStoreFactory(settings)
logFactory = quickfix.ScreenLogFactory(settings)
initiator = quickfix.SocketInitiator(application, storeFactory, settings, logFactory)
initiator.start()
I get the following:
LOGOUT SessionID FIX44:Client->Server
to Admin session FIX44:Client->Server send 8=FIX.4.4|9=62|35=5|34=26|49=Client|52=20200608-12:26:03|56=Server|10=168

File "FIx.py", line 42, in toAdmin
    self.session.sendToTarget(message)
SessionNotFound: Session Not Found
Any idea why it raises this error, please?
Thank you folks!
The toApp/toAdmin and fromApp/fromAdmin methods are callbacks, and you should NOT send the passed message yourself via Session.sendToTarget.
Instead, the message will be sent by QuickFIX when the callback returns.
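A minimal sketch of the corrected callbacks from the code above, with the sendToTarget calls simply removed (these methods go inside the Application class):

def toAdmin(self, message, sessionID):
    msgType = quickfix.MsgType()
    message.getHeader().getField(msgType)
    if msgType.getValue() == quickfix.MsgType_Logon:
        self.logger.info('LOGON SessionID {}'.format(sessionID))
    elif msgType.getValue() == quickfix.MsgType_Logout:
        self.logger.info('LOGOUT SessionID {}'.format(sessionID))
    # no Session.sendToTarget(message) here: the engine sends the
    # message itself once this callback returns
    return

def toApp(self, message, sessionID):
    self.logger.info('to App: {}'.format(message))
    # likewise, just log/inspect the message and return
    return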
from fbchat import log, Client
from ais import abc

# Subclass fbchat.Client and override required methods
class EchoBot(Client):
    def onMessage(self, author_id, message_object, thread_id, thread_type, **kwargs):
        self.markAsDelivered(author_id, thread_id)
        self.markAsRead(author_id)
        log.info("{} from {} in {}".format(message_object, thread_id, thread_type.name))
        # If you're not the author, echo
        if author_id != self.uid:
            abc(message_object)  # <-- HERE IS THE PROBLEM

client = EchoBot("email", "password")
client.listen()
The problem is that message_object, instead of being the sent message itself (123123), somehow becomes <Message (id): '123123', mentions=[] emoji_size=None attachments=[]>. How can I fix this to get the desired result (123123)?
Try to replace
abc(message_object)
with
abc(message_object.text)
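The onMessage callback receives a full fbchat Message object, which bundles the text with metadata such as the id, mentions and attachments; the plain string is exposed through its text attribute.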
First, I'm sorry about my poor English.
I am creating a website for study, and I built a send-SMS feature using Django + Redis + Celery.
tasks/send_buy_sms.py
from celery import Task
from items.utils import SendSMS


class SendBuyMessageTask(Task):
    def run(self, buyer_nickname, buyer_phone, saler_phone, selected_bookname):
        sms = SendSMS()
        sms.send_sms(buyer_nickname, buyer_phone, saler_phone, selected_bookname)
items/utils.py
import os
import requests
import json


class SendSMS():
    def send_sms(self, buyer_nickname, buyer_phone, saler_phone, selected_bookname):
        appid = [...]
        apikey = [...]
        sender = '...'
        receivers = [saler_phone, ]
        content = '...'
        url = os.environ.get("URL")
        params = {
            'sender': sender,
            'receivers': receivers,
            'content': content,
        }
        headers = {...}
        r = '...'
        return params
When I send an SMS using my code, it works with no problem:

[2017-09-12 17:20:43,532: WARNING/Worker-6] Task success

Now I want to make a log file and write a "success send SMS" entry whenever a user clicks the 'send sms' button.
wef/wef/decorators.py
from django.utils import timezone
import logging


def log_decorator(func):
    logging.basicConfig(filename='../../sendsms.log', level=logging.INFO)

    def wrap_func(self, *args, **kwargs):
        time_stamp = timezone.localtime(timezone.now()).strftime('%Y-%m-%d %H:%M')
        logging.info('[{}] success send SMS'.format(time_stamp))
        print(logging)
        return func(self, *args, **kwargs)
    return wrap_func
But when I click the 'send sms' button, the task runs OK, yet the log file is never created...
So I want to know: what is the problem?
I changed my code from writing a log file to printing the log instead:
wef/wef/decorators.py
from django.utils import timezone


def log_decorator(func):
    def wrap_func(self, *args, **kwargs):
        time_stamp = timezone.localtime(timezone.now()).strftime('%Y-%m-%d %H:%M')
        ## print log
        print('[{}] success send sms'.format(time_stamp))
        ## print log
        return func(self, *args, **kwargs)
    return wrap_func
When I click the 'send sms' button, the log prints inside Celery.
I'm confused, because print() works but creating the log file doesn't.
I don't think my file-logging code itself is the problem, because when I tried creating a log file without Django, Celery and Redis, the file was created successfully.
Same code, same feature, but it doesn't work under Django and Celery.
Please give me some advice. Thank you.
I guess you have to add a logger, like:
# import the logging library
import logging

# Get an instance of a logger
logger = logging.getLogger(__name__)

def my_view(request, arg1, arg):
    ...
    if bad_mojo:
        # Log an error message
        logger.error('Something went wrong!')
Here I am assuming that you have configured your loggers, handlers, filters and formatters.
For more information, visit URL.
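For example, a minimal file-handler configuration in settings.py might look like the sketch below (the file path and logger name are placeholders to adjust for your project). One likely reason your file never appears: logging.basicConfig does nothing once the root logger already has handlers, which is typically the case inside a Celery worker, so an explicit handler configuration is more reliable:

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'file': {
            'level': 'INFO',
            'class': 'logging.FileHandler',
            'filename': '/var/log/sendsms.log',  # use an absolute path
        },
    },
    'loggers': {
        'wef': {  # hypothetical project logger name
            'handlers': ['file'],
            'level': 'INFO',
        },
    },
}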