How to implement a client for a gRPC reflection-enabled server using Python? - python

My goal is to implement a gRPC client that does not know the proto file (it only knows the server/service port number) - the client discovers the service at run-time (reflection).
My client should be able to:
connect to the server
learn which capabilities the server exposes (including messages)
send RPC requests to the reflection-enabled server
I have already seen examples in Go & Java but failed to find one in Python that works for me.
I successfully used the "evans" CLI client with my server, but I want to implement a CLI client on my own.
https://github.com/grpc/grpc/blob/master/doc/server_reflection_tutorial.md
I looked at this example of how to implement the server, and I could use some help to better understand how to implement the client.
https://github.com/grpc/grpc/blob/master/examples/python/helloworld/greeter_server_with_reflection.py
My questions are:
How can the client discover the available services, given only the port number?
How can the client communicate with the server without importing the generated proto files?
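For the first question, here is a minimal sketch (not from the original post) that lists the services a server exposes through the standard reflection API. It assumes the grpcio-reflection package is installed and that the server at localhost:50051 has reflection enabled:

import grpc
from grpc_reflection.v1alpha import reflection_pb2, reflection_pb2_grpc

def list_services(target='localhost:50051'):
    with grpc.insecure_channel(target) as channel:
        stub = reflection_pb2_grpc.ServerReflectionStub(channel)
        # ServerReflectionInfo is a bidirectional stream; a single request
        # asking for the service list is enough here.
        request = reflection_pb2.ServerReflectionRequest(list_services='')
        for response in stub.ServerReflectionInfo(iter([request])):
            for service in response.list_services_response.service:
                print(service.name)

if __name__ == '__main__':
    list_services()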
server side:
from __future__ import print_function
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
    # NOTE(gRPC Python Team): .close() is possible on a channel and should be
    # used in circumstances in which the with statement does not fit the needs
    # of the code.
    #
    # For more channel options, please see https://grpc.io/grpc/core/group__grpc__arg__keys.html
    with grpc.insecure_channel(target='localhost:50051',
                               options=[('grpc.lb_policy_name', 'pick_first'),
                                        ('grpc.enable_retries', 0),
                                        ('grpc.keepalive_timeout_ms', 10000)]) as channel:
        stub = helloworld_pb2_grpc.GreeterStub(channel)
        # Timeout in seconds.
        # Please refer gRPC Python documents for more detail. https://grpc.io/grpc/python/grpc.html
        response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'),
                                 timeout=10)
        print("Greeter client received: " + response.message)

if __name__ == '__main__':
    logging.basicConfig()
    run()
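(For comparison, the reflection-enabled server from the linked greeter_server_with_reflection.py example boils down to roughly the following. It needs the grpcio-reflection package; the servicer body here is the stock hello-world one.)

from concurrent import futures
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
from grpc_reflection.v1alpha import reflection

class Greeter(helloworld_pb2_grpc.GreeterServicer):
    def SayHello(self, request, context):
        return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)

def serve():
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    # Register the fully-qualified service names that reflection should expose.
    service_names = (
        helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name,
        reflection.SERVICE_NAME,
    )
    reflection.enable_server_reflection(service_names, server)
    server.add_insecure_port('[::]:50051')
    server.start()
    server.wait_for_termination()

if __name__ == '__main__':
    serve()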
client side:
from __future__ import print_function
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
    # NOTE(gRPC Python Team): .close() is possible on a channel and should be
    # used in circumstances in which the with statement does not fit the needs
    # of the code.
    with grpc.insecure_channel('localhost:50051') as channel:
        stub = helloworld_pb2_grpc.GreeterStub(channel)
        response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
        print("Greeter client received: " + response.message)

if __name__ == '__main__':
    logging.basicConfig()
    run()
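For the second question (calling the server without importing helloworld_pb2 / helloworld_pb2_grpc), a rough sketch of one dynamic approach. This assumes a reasonably recent grpcio-reflection and protobuf (ProtoReflectionDescriptorDatabase and message_factory.GetMessageClass do not exist in old releases; older protobuf versions use message_factory.MessageFactory().GetPrototype instead), and it assumes the hello-world service name helloworld.Greeter:

import grpc
from google.protobuf import message_factory
from google.protobuf.descriptor_pool import DescriptorPool
from grpc_reflection.v1alpha.proto_reflection_descriptor_database import (
    ProtoReflectionDescriptorDatabase,
)

def run():
    channel = grpc.insecure_channel('localhost:50051')
    # Build a descriptor pool that is fed by the server's reflection service.
    reflection_db = ProtoReflectionDescriptorDatabase(channel)
    pool = DescriptorPool(reflection_db)

    # Look up the service and method descriptors by name at run-time.
    service = pool.FindServiceByName('helloworld.Greeter')
    method = service.FindMethodByName('SayHello')
    request_cls = message_factory.GetMessageClass(method.input_type)
    response_cls = message_factory.GetMessageClass(method.output_type)

    # Issue the unary call without any generated stub code.
    call = channel.unary_unary(
        '/{}/{}'.format(service.full_name, method.name),
        request_serializer=request_cls.SerializeToString,
        response_deserializer=response_cls.FromString,
    )
    response = call(request_cls(name='you'), timeout=10)
    print(response)

if __name__ == '__main__':
    run()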

Related

How exactly does gRPC for Python handle Unix domain sockets?

After trying a few things, I got to a point where I can run a UDS-based client-server gRPC connection on the same host. Since I didn't find any detailed documentation for this setup, my main concern is that I don't know whether I am using the gRPC API properly.
Any reference/resource/explanation would be warmly welcomed.
sources I looked at:
gRPC python API
gRPC python docs
gRPC server in Python with Unix domain socket (SO)
gRPC connection:
server
import grpc
import my_pb2_grpc
from concurrent import futures
server = None
#...
class Listener(my_pb2_grpc.MyServiceServicer):
    #...

def create_connection():
    global server  # assign to the module-level variable, not a local one
    if server is None:
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
        my_pb2_grpc.add_MyServiceServicer_to_server(Listener(), server)
        server.add_insecure_port("unix:///path/to/uds.sock")
        server.start()

def stop_connection():
    global server
    if server:
        server.stop(0)
        server = None
client
import grpc
import my_pb2_grpc
channel = None
stub = None
#...
def create_connection():
    global channel, stub  # assign to the module-level variables
    if channel is None:
        channel = grpc.insecure_channel("unix:///path/to/uds.sock")
        stub = my_pb2_grpc.MyServiceStub(channel)

def stop_connection():
    global channel, stub
    if channel:
        channel.close()
        channel = None
        stub = None
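One small, hedged addition that may help sanity-check this setup (it is not part of the snippets above): after creating the UDS channel, wait for it to become ready before issuing calls, which at least confirms that the socket path and the server binding line up. grpc.channel_ready_future is a standard API; SomeRpc below is just a placeholder for one of your service's methods.

import grpc

def wait_until_ready(channel, timeout=5):
    # Blocks until the channel is connected; raises grpc.FutureTimeoutError otherwise.
    grpc.channel_ready_future(channel).result(timeout=timeout)

# Usage with the client snippet above:
# create_connection()
# wait_until_ready(channel)
# stub.SomeRpc(...)   # placeholder method name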

EV Driver Authorization using RFID in OCPP protocol

I am new to the OCPP protocol and I am building a Python OCPP server that can communicate with an EV charger using OCPP. This server has the feature "Authenticate user via RFID". I have created 2 Python files, Charge_Stattion.py:
# Charge_Stattion.py
import asyncio
import logging
import websockets
from ocpp.v201 import call
from ocpp.v201 import ChargePoint as cp
logging.basicConfig(level=logging.INFO)
class ChargePoint(cp):
    async def authentication(self):
        request = call.AuthorizePayload(
            id_token={'id_token': 'AA12345',
                      'type': 'ISO14443'})
        response = await self.call(request)
        print(response)

async def main():
    async with websockets.connect(
            'ws://localhost:9000/CP_1',
            subprotocols=['ocpp2.0.1']
    ) as ws:
        cp = ChargePoint('CP_1', ws)
        await asyncio.gather(cp.start(), cp.authentication())

if __name__ == '__main__':
    asyncio.run(main())
and Central_System.py:
#Central_System.py
import asyncio
import logging
import websockets
from datetime import datetime
from ocpp.routing import on
from ocpp.v201 import ChargePoint as cp
from ocpp.v201 import call_result
from ocpp.v201.enums import AuthorizationStatusType, Action
logging.basicConfig(level=logging.INFO)
class ChargePoint(cp):
    @on('BootNotification')
    async def on_boot_notification(self, charging_station, reason, **kwargs):
        return call_result.BootNotificationPayload(
            current_time=datetime.utcnow().isoformat(),
            interval=10,
            status='Accepted'
        )

    @on(Action.Authorize)
    async def on_authorize(self, id_token):
        return call_result.AuthorizePayload(
            id_token_info={"status": AuthorizationStatusType.accepted})

async def on_connect(websocket, path):
    """ For every new charge point that connects, create a ChargePoint
    instance and start listening for messages.
    """
    try:
        requested_protocols = websocket.request_headers[
            'Sec-WebSocket-Protocol']
    except KeyError:
        logging.info("Client hasn't requested any Subprotocol. "
                     "Closing Connection")
    if websocket.subprotocol:
        logging.info("Protocols Matched: %s", websocket.subprotocol)
    else:
        # In the websockets lib if no subprotocols are supported by the
        # client and the server, it proceeds without a subprotocol,
        # so we have to manually close the connection.
        logging.warning('Protocols Mismatched | Expected Subprotocols: %s,'
                        ' but client supports %s | Closing connection',
                        websocket.available_subprotocols,
                        requested_protocols)
        return await websocket.close()

    charge_point_id = path.strip('/')
    cp = ChargePoint(charge_point_id, websocket)
    logging.info("abcxyz: %s", charge_point_id)
    await cp.start()

async def main():
    server = await websockets.serve(
        on_connect,
        '0.0.0.0',
        9000,
        subprotocols=['ocpp2.0.1']
    )
    logging.info("WebSocket Server Started")
    await server.wait_closed()

if __name__ == '__main__':
    asyncio.run(main())
Following the document here, I understand that the user must present an RFID card first, then the Charge Station sends an AuthorizeRequest containing the idToken from this RFID card to the Central System, and the Central System then sends an AuthorizeResponse back to the Charge Station. In the two Python files above, I have implemented the Charge Station sending an AuthorizeRequest to the Central System and the Central System sending back an AuthorizeResponse to the Charge Station. This picture demonstrates these processes:
My questions are:
How can I implement the process of the EV driver presenting an RFID card to the Charge Station? Should I create 2 other Python files which represent the EV driver and the RFID card?
How can I know whether the Central System accepts this authentication, and how do I implement this?
Any help will be appreciated.
This is a simple flow:
The EV owner registers himself as an EV client on some server, which provides a unique id like "unique-client-id" and stores this value as an idTag in a database.
When this client goes to charge at some charging station, he enters that unique id into the charging device, which sends it in the following form over the websocket connection (message type 2 is a CALL):
[2, "unique-id-representing-the-current-msg", "Authorize", {"idTag": "unique-client-id"}]
The OCPP server receives that message and looks up the received idTag in the database; if it exists, it sends back a CALLRESULT (message type 3) like this:
[3, "unique-id-representing-the-current-msg", {"idTagInfo": {"status": "Accepted"}}]
I recommend using the sanic framework since it has both websocket and http support by default.
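On the second question (how the Charge Station learns whether the Central System accepted the token), a hedged sketch that extends the ChargePoint class from the charge point file above. It assumes the same ocpp library version as in the question (where the OCPP 2.0.1 call classes are still named *Payload); the AuthorizeResponse is deserialized into call_result.AuthorizePayload, and its id_token_info field carries the dict the Central System returned:

from ocpp.v201 import call
from ocpp.v201 import ChargePoint as cp

class ChargePoint(cp):
    async def authentication(self):
        request = call.AuthorizePayload(
            id_token={'id_token': 'AA12345', 'type': 'ISO14443'})
        response = await self.call(request)
        # self.call() returns the deserialized call_result.AuthorizePayload;
        # id_token_info is the dict the Central System put in its response.
        status = response.id_token_info['status']
        if status == 'Accepted':
            print('Central System accepted the RFID token')
        else:
            print('Authorization rejected: %s' % status)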

rpyc server call client method

I am currently using rpyc to build a server and multiple clients that connect to it. I have data in the clients that I would like to push to the server for further processing, and I would like the server to call a client method whenever a client connects. Their tutorial says that clients can expose their service to the server, but I am getting errors.
Code for my server:
import rpyc
class TestService(rpyc.Service):
    def on_connect(self, conn):
        conn.root.example()

if __name__ == "__main__":
    from rpyc.utils.server import ThreadedServer
    t = ThreadedServer(TestService, port=18861, auto_register=True)
    t.start()
Code for my client:
import rpyc
class ClientService(rpyc.Service):
    def exposed_example(self):
        print "example"

if __name__ == "__main__":
    try:
        rpyc.discover("test")
        c = rpyc.connect_by_service("test", service=ClientService)
        c.close()
    except:
        print "could not find server"
The client is able to connect to the server, but there is an exception in a thread and an error: raise EOFError("stream has been closed"). It is complaining about the line conn.root.example(), and I don't know what the correct syntax would be; the tutorial did not specify it at all.
Any help will be much appreciated!
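The EOFError fits the fact that the client calls c.close() immediately after connecting, so the connection is already gone while the server-side on_connect is still trying to call conn.root.example(). A hedged sketch of one way around it: keep the client connection open and serving (rpyc.BgServingThread is the stock helper for that) so the server's callback can be dispatched.

import time
import rpyc

class ClientService(rpyc.Service):
    def exposed_example(self):
        print("example")

if __name__ == "__main__":
    c = rpyc.connect_by_service("test", service=ClientService)
    # Serve the server's incoming requests on a background thread instead of
    # closing the connection right away.
    bg = rpyc.BgServingThread(c)
    time.sleep(5)   # placeholder for the client's real work
    bg.stop()
    c.close()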

gRPC Python client: server streaming not working as expected

A simple gRPC server and client: the client sends an int and the server streams ints back.
The client reads the messages one by one, but the server runs the generator function for all responses immediately.
server code:
import test_pb2_grpc as pb_grpc
import test_pb2 as pb2
import time
import grpc
from concurrent import futures
class test_servcie(pb_grpc.TestServicer):
    def Produce(self, request, context):
        for i in range(request.val):
            print("request came")
            rs = pb2.Rs()
            rs.st = i + 1
            yield rs

def serve():
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    pb_grpc.add_TestServicer_to_server(test_servcie(), server)
    server.add_insecure_port('[::]:50051')
    print("service started")
    server.start()
    try:
        while True:
            time.sleep(3600)
    except KeyboardInterrupt:
        server.stop(0)

if __name__ == '__main__':
    serve()
client code:
import grpc
import test_pb2_grpc as pb_grpc
import test_pb2 as pb
def test():
    channel = grpc.insecure_channel(
        '{host}:{port}'.format(host="localhost", port=50051))
    stub = pb_grpc.TestStub(channel=channel)
    req = pb.Rq()
    req.val = 20
    for s in stub.Produce(req):
        print(s.st)
        import time
        time.sleep(10)

test()
proto file:
syntax = "proto3";
service Test {
    rpc Produce (Rq) returns (stream Rs);
}

message Rq {
    int32 val = 1;
}

message Rs {
    int32 st = 1;
}
After starting the server, when I run the client, the server-side generator starts running and completes immediately - it loops through the whole range.
What I expected is that it would yield one item at a time as the client reads, but that is not the case.
Is this expected behaviour? My client is still printing the values, but the server has already completed the function.
Yes, this behavior is expected. gRPC features flow control between the two sides of an RPC (so that generating messages too fast on one side won't exhaust memory on the other side) but there's also an allowance for a small amount of buffering (so that a reasonably small amount of data may be sent by one side before the other side explicitly asks for it). In your case the twenty messages sent from server to client all fit within this small allowance. The service-side gRPC Python runtime is calling your service-side Produce method, consuming its entire output of twenty messages, and sending all those messages across the network to your client, where they are locally held by the invocation-side gRPC Python runtime until your invocation-side test function asks for them.
If you want to see the effects of flow control in action, try using huge messages (one megabyte in size or so) or altering the size of the allowance (I think this is done with a channel argument but those are an advanced and relatively-unsupported feature so this is left as an exercise).

How to make a Tornado websocket client receive server notifications?

I'm trying to make an application that normally sends a request to a server and receives a response. If it were only that, I'd go with HTTP and call it a day. But some requests to the server make a change for other clients, so I want the server to send all the affected clients a message that they should update.
For that, I've chosen the WebSocket protocol and the Tornado library to work with it using Python. The simple message exchange was pretty straightforward, despite the asynchrony. However, the WebSocket client is really not that configurable, and I've been struggling to make the client listen for incoming notifications without interrupting the main message exchange.
The server part is represented by the tornado.websocket.WebSocketHandler, which has an on_message method:
from tornado.websocket import WebSocketHandler
class MyHandler(WebSocketHandler):
    def on_message(self, message):
        print('message:', message)
And I'd like something like that in the client part, which is only represented by a function tornado.websocket.websocket_connect (source). This function initiates a tornado.websocket.WebSocketClientConnection (source) object, which has an on_message method, but due to the entangled asynchronous structure, I haven't been able to override it properly, without breaking the main message exchange.
Another way I tried to go was the on_message_callback. This sounded like something I could use, but I couldn't figure out how to use it with read_message. This was my best attempt:
import tornado.websocket
import tornado.ioloop
ioloop = tornado.ioloop.IOLoop.current()
def clbk(message):
    print('received', message)

async def main():
    url = 'server_url_here'
    conn = await tornado.websocket.websocket_connect(url, io_loop=ioloop, on_message_callback=clbk)
    while True:
        print(await conn.read_message())  # The execution hangs here
        st = input()
        conn.write_message(st)

ioloop.run_sync(main)
With this being the server code:
import tornado.ioloop
import tornado.web
import tornado.websocket
import os
class EchoWebSocket(tornado.websocket.WebSocketHandler):
    def open(self):
        self.write_message('hello')

    def on_message(self, message):
        self.write_message(message)
        self.write_message('notification')

if __name__ == "__main__":
    app = tornado.web.Application([(r"/", EchoWebSocket)])
    app.listen(os.getenv('PORT', 8080))
    tornado.ioloop.IOLoop.current().start()
I don't know what's going on here. Am I even going in the right direction with this?
There are two issues here:
Use on_message_callback or loop on await read_message(), but not both. If you give a callback the messages will only be passed to that callback and not saved for use by read_message.
input is blocking and doesn't play well with Tornado. It's fine in this little toy demo, but for anything closer to production you'll probably want to wrap a PipeIOStream around sys.stdin and use stream.read_until('\n').
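A hedged sketch of how those two points could fit together on the client side, assuming the echo server above is listening on ws://localhost:8080/ (the URL is an assumption) and using a PipeIOStream over sys.stdin instead of input():

import sys
import tornado.ioloop
import tornado.websocket
from tornado.iostream import PipeIOStream

def on_message(message):
    # Called for every incoming frame; message is None when the server closes.
    if message is None:
        print('connection closed')
        return
    print('received:', message)

async def main():
    conn = await tornado.websocket.websocket_connect(
        'ws://localhost:8080/', on_message_callback=on_message)
    stdin = PipeIOStream(sys.stdin.fileno())
    while True:
        line = await stdin.read_until(b'\n')   # non-blocking read from stdin
        await conn.write_message(line.decode().strip())

tornado.ioloop.IOLoop.current().run_sync(main)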
