Combine GPT3 with RASA - python

I am trying to integrate Rasa with GPT-3, but I am not getting the proper response. Can you look at my code and tell me what the issue is?
def gpt3(text):
    response = openai.Completion.create(
        model="code-cushman-001",
        # engine="ada",
        prompt="\n\n" + text,
        temperature=0,
        logprobs=10,
        max_tokens=150,
        top_p=0,
        frequency_penalty=0,
        presence_penalty=0,
        stop=[" \n\n"]
    )
    return response['choices'][0]['text']
action.py
class ActionDefaultFallback(Action):
    def __init__(self):
        # self.gpt3 = gpt3()
        super().__init__()

    def name(self) -> Text:
        return "action_default_fallback"

    async def run(self, dispatcher, tracker, domain):
        query = tracker.latest_message['text']
        dispatcher.utter_message(text=gpt3(query))
        return [UserUtteranceReverted()]
I am not able to understand the issue. Please help me solve it.
Thanks

Hope this is resolved, but in case someone is following up on this: since the exact error was not provided I am not sure this answer is correct, but I was able to get this working with very little modification to the above code.
import logging

import openai

logger = logging.getLogger(__name__)

def gpt3(text):
    try:
        response = openai.Completion.create(
            # model="text-davinci-003",
            model="text-ada-001",
            prompt="\n\n" + text + "?",
            temperature=0,
            logprobs=10,
            max_tokens=100,
            top_p=0,
            frequency_penalty=0,
            presence_penalty=0,
            stop=[" \n\n"]
        )
        return response['choices'][0]['text']
    except Exception as e:
        logger.error('openai request failed: %s', e)
actions.py
class ActionDefaultFallback(Action):
    def __init__(self):
        super().__init__()

    def name(self) -> Text:
        return "action_default_fallback"

    async def run(self, dispatcher, tracker, domain):
        query = tracker.latest_message['text']
        return dispatcher.utter_message(text=gpt3(query))
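If the fallback still returns nothing, it helps to verify the helper outside Rasa first. A minimal sanity check, assuming the legacy openai package used above and a placeholder API key (both assumptions, not part of the original answer):

import logging

import openai

logging.basicConfig(level=logging.INFO)

openai.api_key = "sk-..."  # placeholder; load your real key from an env var

# Call the helper directly: if this prints a sensible completion, the
# remaining problem is in the Rasa action wiring, not the OpenAI call.
print(gpt3("What is the capital of France"))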

Related

Change "Error: response status" details in FastAPI / OpenAPI

I implemented some custom exceptions, and now I want to display the exception name in the red box. Where can I set this value?
class CustomExceptionHandler(Exception):
    def __init__(self, exception):
        self.message = exception.message
        self.status_code_number = exception.status_code_number

@app.exception_handler(CustomExceptionHandler)
async def data_processing_error_handler(request: Request, exc: CustomExceptionHandler):
    return JSONResponse(
        status_code=exc.status_code_number,
        content=jsonable_encoder({exc.message}),
    )
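One plausible adjustment (a sketch, not a confirmed fix for the red box in the docs): put the exception class name into the response content explicitly. Note that {exc.message} above is a Python set literal, which jsonable_encoder serializes as a one-element list, so a dict is clearer:

@app.exception_handler(CustomExceptionHandler)
async def data_processing_error_handler(request: Request, exc: CustomExceptionHandler):
    # A dict is clearer than the set literal {exc.message} and lets us
    # attach the exception class name alongside the message.
    return JSONResponse(
        status_code=exc.status_code_number,
        content={"error": type(exc).__name__, "detail": exc.message},
    )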

How to write offline test cases for FastAPI dependency?

I currently have the following code for my classification model server. The classifier is passed as a dependency to the index (/) function.
# classifier.py
import asyncio
import httpx

class Classifier:
    def __init__(
        self,
        concurrency_limit,
    ) -> None:
        self.client = httpx.AsyncClient()
        self.semaphore = asyncio.Semaphore(concurrency_limit)

    async def download_async(self, url):
        async with self.semaphore:
            response = await self.client.get(url)
            return await response.aread()

    async def run(self, image_urls):
        image_list = await asyncio.gather(
            *[self.download_async(url) for i, url in enumerate(image_urls)]
        )
        # Infer Images
        pass

# api.py
async def classifier_dependency() -> Classifier:
    return Classifier(
        concurrency_limit=constants.CONCURRENCY_LIMIT,
    )

@server.post("/")
async def index(
    data, classifier = Depends(classifier_dependency)
) -> Dict:
    results = await classifier.run(data.images)
I am trying to write tests for the API which can be run offline. I basically want to mock the response from httpx.get(). Here is what I am currently doing:
# test_api.py
class AsyncMock(MagicMock):  # Not needed if using Python 3.8+
    async def __call__(self, *args, **kwargs):
        return super(AsyncMock, self).__call__(*args, **kwargs)

def update_mock_dependency(image_bytes):
    response = AsyncMock(name="Response")
    response.aread.return_value = image_bytes

    async def override_dependency():
        classifier = Classifier(
            concurrency_limit=constants.CONCURRENCY_LIMIT,
        )

        async def f(_):
            return response

        classifier.client.get = f
        return classifier

    server.dependency_overrides[classifier_dependency] = override_dependency

def test_successful_inference(image_bytes, image_urls):
    """
    Test that the model output is similar to the expected output.
    """
    update_mock_dependency(image_bytes)
    data = {"images": image_urls}
    response = client.post("/", json=data)
    assert response.status_code == 200
I'm not sure how to go about this right now in a clean way. Is there a better alternative using mock.patch instead of manually overriding the httpx get function?
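One alternative sketch, assuming Python 3.8+ (where unittest.mock.AsyncMock is built in) and the same TestClient instance client and fixtures as above: patch httpx.AsyncClient.get at the class level, so the real dependency can stay in place:

# test_api.py (alternative using mock.patch; Python 3.8+)
from unittest import mock

import httpx

def test_successful_inference_with_patch(image_bytes, image_urls):
    # Any Classifier built by the real dependency now hits the mock
    # instead of the network.
    mock_response = mock.AsyncMock(name="Response")
    mock_response.aread.return_value = image_bytes

    with mock.patch.object(
        httpx.AsyncClient, "get",
        new=mock.AsyncMock(return_value=mock_response),
    ):
        response = client.post("/", json={"images": image_urls})
    assert response.status_code == 200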

Pubsublite message acknowledgement not working

I'm using Google Pub/Sub Lite: a small dummy topic with a single partition and a few messages, with the Python client library. I do the standard SubscriberClient.subscribe with a callback. The callback places each message in a queue. When a message is taken out of the queue for consumption, its ack is called. When I want to stop, I call subscribe_future.cancel(); subscribe_future.result() and discard any unconsumed messages in the queue.
Say I know the topic has 30 messages, and I consume 10 of them before stopping. Then I start a new SubscriberClient on the same subscription and receive messages. I expect to start with the 11th message, but I get the first one again. So the previous subscriber had ack'd the first 10, but it's as if the server never received the acknowledgements.
I thought maybe the acks need some time to reach the server, so I waited 2 minutes before starting the second subscribe. That didn't help.
Then I thought maybe the subscriber object manages the ack calls and I need to "flush" them before cancelling, but I found nothing about that.
What am I missing? Thanks.
Here's the code. If you have a Pub/Sub Lite account, it is executable after you fill in credentials. The code shows two issues; one is the subject of this question, the other is asked here.
# Using python 3.8
from __future__ import annotations

import logging
import pickle
import queue
import time
import uuid
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from typing import Union, Optional

from google.api_core.exceptions import AlreadyExists
from google.cloud.pubsub_v1.types import BatchSettings
from google.cloud.pubsublite import AdminClient, PubSubMessage
from google.cloud.pubsublite import Reservation as GCPReservation
from google.cloud.pubsublite import Subscription as GCPSubscription
from google.cloud.pubsublite import Topic as GCPTopic
from google.cloud.pubsublite.cloudpubsub import (PublisherClient,
                                                 SubscriberClient)
from google.cloud.pubsublite.types import (BacklogLocation, CloudZone,
                                           LocationPath,
                                           ReservationPath, SubscriptionPath,
                                           TopicPath,
                                           )
from google.cloud.pubsublite.types import FlowControlSettings
from google.oauth2.service_account import Credentials

logging.getLogger('google.cloud').setLevel(logging.WARNING)
logger = logging.getLogger(__name__)

FORMAT = '[%(asctime)s.%(msecs)03d %(name)s] %(message)s'
logging.basicConfig(format=FORMAT, level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S')


class Account:
    def __init__(self,
                 project_id: str,
                 region: str,
                 zone: str,
                 credentials: Credentials,
                 ):
        self.project_id = project_id
        self.region = region
        self.zone = CloudZone.parse(zone)
        self.credentials = credentials
        self.client = AdminClient(region=region, credentials=credentials)

    def location_path(self) -> LocationPath:
        return LocationPath(self.project_id, self.zone)

    def reservation_path(self, name: str) -> ReservationPath:
        return ReservationPath(self.project_id, self.region, name)

    def topic_path(self, name: str) -> TopicPath:
        return TopicPath(self.project_id, self.zone, name)

    def subscription_path(self, name: str) -> SubscriptionPath:
        return SubscriptionPath(self.project_id, self.zone, name)

    def create_reservation(self, name: str, *, capacity: int = 32) -> None:
        path = self.reservation_path(name)
        reservation = GCPReservation(name=str(path),
                                     throughput_capacity=capacity)
        self.client.create_reservation(reservation)
        # logger.info('reservation %s created', name)

    def create_topic(self,
                     name: str,
                     *,
                     partition_count: int = 1,
                     partition_size_gib: int = 30,
                     reservation_name: str = 'default') -> Topic:
        # A topic name can not be reused within one hour of deletion.
        top_path = self.topic_path(name)
        res_path = self.reservation_path(reservation_name)

        topic = GCPTopic(
            name=str(top_path),
            partition_config=GCPTopic.PartitionConfig(count=partition_count),
            retention_config=GCPTopic.RetentionConfig(
                per_partition_bytes=partition_size_gib * 1024 * 1024 * 1024),
            reservation_config=GCPTopic.ReservationConfig(
                throughput_reservation=str(res_path)))

        self.client.create_topic(topic)
        # logger.info('topic %s created', name)
        return Topic(name, self)

    def delete_topic(self, name: str) -> None:
        path = self.topic_path(name)
        self.client.delete_topic(path)
        # logger.info('topic %s deleted', name)

    def get_topic(self, name: str) -> Topic:
        return Topic(name, self)


class Topic:
    def __init__(self, name: str, account: Account):
        self.account = account
        self.name = name
        self._path = self.account.topic_path(name)

    def create_subscription(self,
                            name: str,
                            *,
                            pos: str = None) -> Subscription:
        path = self.account.subscription_path(name)

        if pos is None or pos == 'beginning':
            starting_offset = BacklogLocation.BEGINNING
        elif pos == 'end':
            starting_offset = BacklogLocation.END
        else:
            raise ValueError(
                'Argument start only accepts one of two values - "beginning" or "end"'
            )

        Conf = GCPSubscription.DeliveryConfig
        subscription = GCPSubscription(
            name=str(path),
            topic=str(self._path),
            delivery_config=Conf(delivery_requirement=Conf.DeliveryRequirement.DELIVER_IMMEDIATELY))

        self.account.client.create_subscription(subscription, starting_offset)
        # logger.info('subscription %s created for topic %s', name, self.name)
        return Subscription(name, self)

    def delete_subscription(self, name: str) -> None:
        path = self.account.subscription_path(name)
        self.account.client.delete_subscription(path)
        # logger.info('subscription %s deleted from topic %s', name, self.name)

    def get_subscription(self, name: str):
        return Subscription(name, self)

    @contextmanager
    def get_publisher(self, **kwargs):
        with Publisher(self, **kwargs) as pub:
            yield pub


class Publisher:
    def __init__(self, topic: Topic, *, batch_size: int = 100):
        self.topic = topic
        self._batch_config = {
            'max_bytes': 3 * 1024 * 1024,  # 3 Mb; must be ...
        }
        ...

    def put(self, data) -> None:
        self._messages.put(data)


class Subscription:
    def __init__(self, name: str, topic: Topic):
        self.topic = topic
        self.name = name
        self._path = topic.account.subscription_path(name)

    @contextmanager
    def get_subscriber(self, *, backlog=None):
        with Subscriber(self, backlog=backlog) as sub:
            yield sub


class Subscriber:
    def __init__(self, subscription: Subscription, backlog: int = None):
        self.subscription = subscription
        self._backlog = backlog or 100
        self._cancel_requested: bool = None
        self._messages: queue.Queue = None
        self._pool: ThreadPoolExecutor = None
        self._NOMORE = object()
        self._subscribe_task = None

    def __enter__(self):
        self._pool = ThreadPoolExecutor(1).__enter__()
        self._messages = queue.Queue(self._backlog)
        messages = self._messages

        def callback(msg: PubSubMessage):
            logger.info('got %s', pickle.loads(msg.data))
            messages.put(msg)

        def _subscribe():
            flowcontrol = FlowControlSettings(
                messages_outstanding=self._backlog,
                bytes_outstanding=1024 * 1024 * 10)
            subscriber = SubscriberClient(credentials=self.subscription.topic.account.credentials)
            with subscriber:
                fut = subscriber.subscribe(self.subscription._path, callback, flowcontrol)
                logger.info('subscribe sent to gcp')
                while True:
                    if self._cancel_requested:
                        fut.cancel()
                        fut.result()
                        while True:
                            while not messages.empty():
                                try:
                                    _ = messages.get_nowait()
                                except queue.Empty:
                                    break
                            try:
                                messages.put_nowait(self._NOMORE)
                                break
                            except queue.Full:
                                continue
                        break
                    time.sleep(0.003)

        self._subscribe_task = self._pool.submit(_subscribe)
        return self

    def __exit__(self, *args, **kwargs):
        if self._pool is not None:
            if self._subscribe_task is not None:
                self._cancel_requested = True
                while True:
                    z = self._messages.get()
                    if z is self._NOMORE:
                        break
                self._subscribe_task.result()
                self._subscribe_task = None
                self._messages = None
            self._pool.__exit__(*args, **kwargs)
            self._pool = None

    def get(self, timeout=None):
        if timeout is not None and timeout == 0:
            msg = self._messages.get_nowait()
        else:
            msg = self._messages.get(block=True, timeout=timeout)
        data = pickle.loads(msg.data)
        msg.ack()
        return data


def get_account() -> Account:
    return Account(project_id='--fill-in-proj-id--',
                   region='us-central1',
                   zone='us-central1-a',
                   credentials='--fill-in-creds--')


# This test shows that it takes extremely long to get the first message
# in `subscribe`.
def test1(account):
    name = 'test-' + str(uuid.uuid4())
    topic = account.create_topic(name)
    try:
        with topic.get_publisher() as p:
            p.put(1)
            p.put(2)
            p.put(3)

        sub = topic.create_subscription(name)
        try:
            with sub.get_subscriber() as s:
                t0 = time.time()
                logger.info('getting the first message')
                z = s.get()
                t1 = time.time()
                logger.info(' got the first message')
                print(z)
                print('getting the first msg took', t1 - t0, 'seconds')
        finally:
            topic.delete_subscription(name)
    finally:
        account.delete_topic(name)


def test2(account):
    name = 'test-' + str(uuid.uuid4())
    topic = account.create_topic(name)
    N = 30
    try:
        with topic.get_publisher(batch_size=1) as p:
            for i in range(N):
                p.put(i)

        sub = topic.create_subscription(name)
        try:
            with sub.get_subscriber() as s:
                for i in range(10):
                    z = s.get()
                    assert z == i

            # The following block shows that the subscriber
            # resets to the first message, not as expected
            # that it picks up where the last block left off.
            with sub.get_subscriber() as s:
                for i in range(10, 20):
                    z = s.get()
                    try:
                        assert z == i
                    except AssertionError as e:
                        print(z, '!=', i)
                        return
        finally:
            topic.delete_subscription(name)
    finally:
        account.delete_topic(name)


if __name__ == '__main__':
    a = get_account()
    try:
        a.create_reservation('default')
    except AlreadyExists:
        pass

    test1(a)
    print('')
    test2(a)
I found a solution: before cancelling the "subscribe" future, I need to sleep a little to allow acknowledgements to be flushed (i.e. sent out). In particular, google.cloud.pubsublite.cloudpubsub.internal.make_subscriber._DEFAULT_FLUSH_SECONDS (value 0.1) appears to be the interval to watch; sleep a little longer than that to be sure.
This looks like a bug in the google package: "cancelling" the future should mean abandoning unprocessed messages, while acknowledgements that have already been submitted should still be sent out. The bug may have gone unnoticed because duplicate message delivery is not an error.
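In code, the workaround looks roughly like this (a sketch: _DEFAULT_FLUSH_SECONDS is a private constant in the package internals and may change between releases):

import time

from google.cloud.pubsublite.cloudpubsub.internal import make_subscriber

# Sleep past the internal ack-flush interval before cancelling, so that
# acknowledgements already submitted are actually sent to the server.
time.sleep(make_subscriber._DEFAULT_FLUSH_SECONDS * 2)
subscribe_future.cancel()
subscribe_future.result()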
I was not able to recreate your issue, but I think you should check the way it is handled in the official documentation about using Cloud Pub/Sub Lite.
This is the code I extracted and updated from the Receiving messages sample, and it works as intended: it gets each message from the Lite topic and acknowledges it to avoid getting it again. If rerun, it only pulls data if there is data left to pull. I added the code so you can check whether something differs from your code.
consumer.py
from concurrent.futures._base import TimeoutError

from google.cloud.pubsublite.cloudpubsub import SubscriberClient
from google.cloud.pubsublite.types import (
    CloudRegion,
    CloudZone,
    FlowControlSettings,
    SubscriptionPath,
    MessageMetadata,
)
from google.cloud.pubsub_v1.types import PubsubMessage

# TODO(developer):
project_number = project-number
cloud_region = "us-central1"
zone_id = "a"
subscription_id = "sub-id"
timeout = 90

location = CloudZone(CloudRegion(cloud_region), zone_id)
subscription_path = SubscriptionPath(project_number, location, subscription_id)
per_partition_flow_control_settings = FlowControlSettings(
    messages_outstanding=1000,
    bytes_outstanding=10 * 1024 * 1024,
)

def callback(message: PubsubMessage):
    message_data = message.data.decode("utf-8")
    metadata = MessageMetadata.decode(message.message_id)
    print(f"Received {message_data} of ordering key {message.ordering_key} with id {metadata}.")
    message.ack()

# SubscriberClient() must be used in a `with` block or have __enter__() called before use.
with SubscriberClient() as subscriber_client:
    streaming_pull_future = subscriber_client.subscribe(
        subscription_path,
        callback=callback,
        per_partition_flow_control_settings=per_partition_flow_control_settings,
    )
    print(f"Listening for messages on {str(subscription_path)}...")
    try:
        streaming_pull_future.result(timeout=timeout)
    except (TimeoutError, KeyboardInterrupt):
        streaming_pull_future.cancel()
        assert streaming_pull_future.done()
The only way I could reproduce your scenario was by using different subscriptions. On that note, when different subscriptions pull messages from the same topic, each one receives all the stored messages, as explained in Receiving messages from Lite subscriptions.
Consider this:
Check your subscription's delivery configuration. You can use the Create and manage Lite subscriptions page for guidance.
Check whether your code and the official samples preserve the same structure. For my case, I checked the following samples (a small publishing sketch follows the list):
Create a Lite reservation
Create a Lite topic
Create a Lite subscription
Publishing messages
Receiving messages
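For reference, a publishing counterpart modeled on the Publishing messages sample linked above (the project number, region, and topic id are placeholders):

# producer.py
from google.cloud.pubsublite.cloudpubsub import PublisherClient
from google.cloud.pubsublite.types import CloudRegion, CloudZone, TopicPath

# TODO(developer): fill in
project_number = 1122334455
location = CloudZone(CloudRegion("us-central1"), "a")
topic_path = TopicPath(project_number, location, "topic-id")

# PublisherClient() must also be used in a `with` block.
with PublisherClient() as publisher_client:
    api_future = publisher_client.publish(
        topic_path, data="Hello world!".encode("utf-8")
    )
    # result() blocks until the publish is acknowledged and returns a
    # message id encoding the partition and offset.
    message_id = api_future.result()
    print(f"Published a message with id {message_id}.")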

How to update request parameters in FastAPI

I am using FastAPI, and I want to define a middleware in which I can intercept the encrypted parameters passed by the front end, decrypt them, and replace the original parameters with the decrypted ones. What should I do?
I have tried
body = await request.body()
request._body = body
Also I have tried
async def set_body(request: Request, body: bytes):
    async def receive() -> Message:
        return {"type": "http.request", "body": body}
    request._receive = receive

async def get_body(request: Request) -> bytes:
    body = await request.body()
    await set_body(request, body)
    return body
But there is still no solution; can anyone suggest one? Thanks a lot!
=========================================================================
class GzipRequest(Request):
    async def body(self) -> bytes:
        # if not hasattr(self, "_body"):
        body = await super().body()
        # if "gzip" in self.headers.getlist("Content-Encoding"):
        #     body = gzip.decompress(body)
        self._body = body
        return self._body

class GzipRoute(APIRoute):
    def get_route_handler(self) -> Callable:
        original_route_handler = super().get_route_handler()

        async def custom_route_handler(request: Request) -> Response:
            request = GzipRequest(request.scope, request.receive)
            return await original_route_handler(request)

        return custom_route_handler

app.router.route_class = GzipRoute
I also tested this method, but it still didn't work!
==============================================================
I have solved this problem with app.router.route_class = GzipRoute.
Main tip: when defining routes in another module with an APIRouter, you also need to set the route class there, for example:
router = APIRouter(route_class=GzipRoute)
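Applied to the original decryption problem, the same route-class pattern would look roughly like this (a sketch: decrypt_payload is a placeholder for whatever scheme the front end uses, not a real library function):

from typing import Callable

from fastapi import APIRouter, Request, Response
from fastapi.routing import APIRoute

def decrypt_payload(raw: bytes) -> bytes:
    # Placeholder: substitute the actual decryption scheme here.
    return raw

class DecryptRequest(Request):
    async def body(self) -> bytes:
        if not hasattr(self, "_body"):
            self._body = decrypt_payload(await super().body())
        return self._body

class DecryptRoute(APIRoute):
    def get_route_handler(self) -> Callable:
        original_route_handler = super().get_route_handler()

        async def custom_route_handler(request: Request) -> Response:
            # Hand the endpoint a request whose body() yields decrypted bytes.
            return await original_route_handler(
                DecryptRequest(request.scope, request.receive)
            )

        return custom_route_handler

router = APIRouter(route_class=DecryptRoute)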

Twisted python: the correct way to pass a kwarg through the component system to a factory

I need to pass a kwarg to the parent class of my equivalent of FingerFactoryFromService using super.
I know I am actually passing the kwarg to IFingerFactory, because that is also where I pass the service that ends up in FingerFactoryFromService's __init__. I understand that it is getting tripped up somewhere in the component system, but I cannot think of any other way.
The error I keep getting is
exceptions.TypeError: 'test' is an invalid keyword argument for this function
Versions of code in my virtualenv are:
pip (1.4.1)
setuptools (1.1.6)
Twisted (13.1.0)
wsgiref (0.1.2)
zope.interface (4.0.5)
This is a cutdown example from the finger tutorial demonstrating the issue:
from twisted.protocols import basic
from twisted.application import internet, service
from twisted.internet import protocol, reactor, defer
from twisted.python import components
from zope.interface import Interface, implements  # @UnresolvedImport


class IFingerService(Interface):

    def getUser(user):  # @NoSelf
        """
        Return a deferred returning a string.
        """

    def getUsers():  # @NoSelf
        """
        Return a deferred returning a list of strings.
        """


class IFingerFactory(Interface):

    def getUser(user):  # @NoSelf
        """
        Return a deferred returning a string.
        """

    def buildProtocol(addr):  # @NoSelf
        """
        Return a protocol returning a string.
        """


def catchError(err):
    return "Internal error in server"


class FingerProtocol(basic.LineReceiver):

    def lineReceived(self, user):
        d = self.factory.getUser(user)
        d.addErrback(catchError)

        def writeValue(value):
            self.transport.write(value + '\r\n')
            self.transport.loseConnection()

        d.addCallback(writeValue)


class FingerService(service.Service):

    implements(IFingerService)

    def __init__(self, filename):
        self.filename = filename
        self.users = {}

    def _read(self):
        self.users.clear()
        for line in file(self.filename):
            user, status = line.split(':', 1)
            user = user.strip()
            status = status.strip()
            self.users[user] = status
        self.call = reactor.callLater(30, self._read)  # @UndefinedVariable

    def getUser(self, user):
        print user
        return defer.succeed(self.users.get(user, "No such user"))

    def getUsers(self):
        return defer.succeed(self.users.keys())

    def startService(self):
        self._read()
        service.Service.startService(self)

    def stopService(self):
        service.Service.stopService(self)
        self.call.cancel()


class FingerFactoryFromService(protocol.ServerFactory):

    implements(IFingerFactory)

    protocol = FingerProtocol

    #def __init__(self, srv):
    def __init__(self, srv, test=None):
        self.service = srv
        ## I need to call super here because my equivalent of ServerFactory requires
        ## a kwarg, but this cutdown example doesn't, so I just assign it to a property.
        # super(FingerFactoryFromService, self).__init__(test=test)
        self.test_thing = test or 'Default Something'

    def getUser(self, user):
        return self.service.getUser(user)


components.registerAdapter(FingerFactoryFromService,
                           IFingerService,
                           IFingerFactory)

application = service.Application('finger')
serviceCollection = service.IServiceCollection(application)

finger_service = FingerService('/etc/passwd')
finger_service.setServiceParent(serviceCollection)

#line_finger_factory = IFingerFactory(finger_service)
line_finger_factory = IFingerFactory(finger_service, test='Something')

line_finger_server = internet.TCPServer(1079, line_finger_factory)
line_finger_server.setServiceParent(serviceCollection)
This has nothing to do with the component system. What you want to do is override the Factory's buildProtocol method, as documented here:
https://twistedmatrix.com/documents/current/core/howto/servers.html#auto9
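For completeness, one workaround sketch (my assumption, not part of the answer above): the adaptation call IFingerFactory(finger_service, ...) only accepts the adaptee plus an optional default value, so extra keyword arguments cannot pass through it; constructing the registered adapter class directly avoids the problem:

# Hypothetical workaround: skip the adaptation call and instantiate the
# registered adapter class directly, so the kwarg reaches __init__.
line_finger_factory = FingerFactoryFromService(finger_service, test='Something')
line_finger_server = internet.TCPServer(1079, line_finger_factory)
line_finger_server.setServiceParent(serviceCollection)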
