How to detect changes in a Firebase child with Python?

I am having some trouble with this application. What I need is this: if I detect a change in the database (Firebase), particularly in the 'sala' and 'ventilacion' nodes, the function should do what it has to do; if there is no change in the database, it should do nothing. I am using Python and the pyrebase library. Here is the code. Thank you very much for your help.
import pyrebase
import serial
import time

config = {
    # firebase configurations
}
firebase = pyrebase.initialize_app(config)
db = firebase.database()

def ReconfiguracionFabrica():
    ser.write('AT')
    time.sleep(0.2)
    ser.write('AT+RENEW')
    time.sleep(0.3)

def ConfiguracionMaster():
    time.sleep(0.5)
    ser.write('AT+IMME1')
    time.sleep(0.350)
    ser.write('AT+ROLE1')
    time.sleep(0.2)

ser = serial.Serial(port="/dev/ttyAMA0", baudrate=9600, timeout=1)
ReconfiguracionFabrica()
time.sleep(0.1)
ConfiguracionMaster()
time.sleep(0.1)
print "********** INICIO *************"
ser.flushInput()

contador = 0
prender = ''
ventilacion1 = ''
checkeo = ''

while True:
    # This is where the conditional would go that lets me detect a change only in the 'sala' node:
    # if db.child("sala")...:
    salidaLed1 = db.child("sala").get()
    ser.write('AT')
    time.sleep(0.1)
    ser.write('AT+CON508CB16A7014')
    time.sleep(0.1)
    if salidaLed1.val() == True:
        prender = ";"
    if salidaLed1.val() == False:
        prender = ","
    ser.write('luz: %s \n' % (prender))
    print ('luz: %s \n' % (prender))
    time.sleep(1)
    checkeo = ser.read(1)  # ser.read() takes a byte count and returns the data read
    if checkeo == 'j':
        ReconfiguracionFabrica()
        time.sleep(0.1)
        ConfiguracionMaster()

Question: How to detect changes in firebase child
Note: All Examples use Public Access
Set up example data and verify it's readable.
This has to be done only once!
temperature_c = 30
data = {'date': time.strftime('%Y-%m-%d'),
        'time': time.strftime('%H:%M:%S'),
        'temperature': temperature_c}
db.child('public').child('Device_1').set(data)
response = db.child('public').child('Device_1').get()
print(response.val())
Create First Script doing Updates:
for t in [25, 26, 27, 28, 29, 30, 31, 32, 33, 35]:
    temperature_c = t
    data = {'date': time.strftime('%Y-%m-%d'),
            'time': time.strftime('%H:%M:%S'),
            'temperature': temperature_c}
    db.child('public').child('Device_1').update(data)
    time.sleep(60)
Create Second Script with Stream Handler
def stream_handler(message):
    print('event={m[event]}; path={m[path]}; data={m[data]}'
          .format(m=message))

my_stream = db.child('public').child('Device_1').stream(stream_handler)

# Run Stream Handler forever
while True:
    data = input("[{}] Type exit to disconnect: ".format('?'))
    if data.strip().lower() == 'exit':
        print('Stop Stream Handler')
        if my_stream: my_stream.close()
        break
Run Stream Handler Script:
Response Output from def stream_handler after startup (Initial Data):
event="put"; path=/; data={'Device_1': {'temperature': 30, 'time': '13:34:24', 'date': '2017-07-20'}}
Run Updater Script:
Watch Output from Stream Handler Script
Response Output from def stream_handler after First Update Data:
event=patch; path=/Device_1; data={'temperature': 25, 'time': '13:49:12'}
Tested with Python: 3.4.2
Pyrebase
streaming
You can listen to live changes to your data with the stream() method.
def stream_handler(message):
    print(message["event"])  # put
    print(message["path"])   # /-K7yGTTEp7O549EzTYtI
    print(message["data"])   # {'title': 'Pyrebase', "body": "etc..."}

my_stream = db.child("posts").stream(stream_handler)
You should at least handle put and patch events. Refer to "Streaming from the REST API" for details.
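Applied to the question's nodes, here is a minimal sketch (same message format as quoted above; the handler names and prints are mine) that opens one stream per node, so each callback fires only when its own node changes:
def sala_handler(message):
    # the first event after stream() starts is a "put" carrying the current value;
    # later writes arrive as "put" or "patch" events
    print('sala changed:', message["data"])

def ventilacion_handler(message):
    print('ventilacion changed:', message["data"])

sala_stream = db.child("sala").stream(sala_handler)
ventilacion_stream = db.child("ventilacion").stream(ventilacion_handler)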

I know this post is 2 years old, but I hope this helps. Try using the firebase_admin module.
Install it with this command: pip install firebase-admin
I too had a requirement where I needed to check for changes made to the Firebase database. I referred here.
Following is sample code based on your question which you can refer to and try out.
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db

cred = credentials.Certificate("path/to/serviceAccountKey.json")
firebase_admin.initialize_app(cred, {
    'databaseURL': 'https://example.firebaseio.com',
    'databaseAuthVariableOverride': None
})

def ignore_first_call(fn):
    # listen() fires once immediately with the data already at the reference;
    # this decorator swallows that first call so we only react to real changes
    called = False
    def wrapper(*args, **kwargs):
        nonlocal called
        if called:
            return fn(*args, **kwargs)
        else:
            called = True
            return None
    return wrapper

@ignore_first_call
def listener(event):
    print(event.event_type)  # can be 'put' or 'patch'
    print(event.path)        # relative to the reference, it seems
    print(event.data)        # new data at /reference/event.path. None if deleted
    node = str(event.path).split('/')[-2]  # you can slice the path according to your requirement
    property = str(event.path).split('/')[-1]
    value = event.data
    if node == 'sala':
        pass  # do something
    elif node == 'ventilacion':
        pass  # do something
    else:
        pass  # do something else

db.reference('/').listen(listener)

I was working on the same thing, so based on current updates to pyrebase and what I learned from the answers posted above, I got this running perfectly. (Please make sure you have upgraded from Python 2 to Python 3 to run pyrebase and firebase-admin.)
import firebase_admin
import pyrebase
from firebase_admin import credentials

config = {
    "apiKey": "",
    "authDomain": "",
    "databaseURL": "",
    "projectId": "",
    "storageBucket": "",
    "serviceAccount": "path to the service account json file you downloaded",
    "messagingSenderId": "",
    "appId": "",
    "measurementId": ""
}

firebase = pyrebase.initialize_app(config)
storage = firebase.storage()

cred = credentials.Certificate("path to downloaded json file")
firebase_admin.initialize_app(cred, {
    "databaseURL": "same as config",
    "databaseAuthVariableOverride": None
})
db = firebase.database()

def ignore_first_call(fn):
    called = False
    def wrapper(*args, **kwargs):
        nonlocal called
        if called:
            return fn(*args, **kwargs)
        else:
            called = True
            return None
    return wrapper

def stream_handler(message):
    ab = str(1)
    all_videos = storage.child("videos/").list_files()  # node where files are
    path_on_local = "local path to save the downloads"
    print(message["event"])  # put
    print(message["path"])   # /-K7yGTTEp7O549EzTYtI
    print(message["data"])   # {'title': 'Pyrebase', "body": "etc..."}
    node = str(message["path"]).split('/')[-2]
    property = str(message["path"]).split('/')[-1]
    value = message["data"]
    if message["event"] == "put":
        for videos in all_videos:
            try:
                print(videos.name)
                z = storage.child(videos.name).get_url(None)
                storage.child(videos.name).download(path_on_local + "/" + ab + ".mp4")
                x = int(ab)
                ab = str(x + 1)
            except:
                print('Download Failed')
    else:
        print("error")

my_stream = db.child("videos").stream(stream_handler)

Related

GC (Allocation Failure) [PSYoungGen] issue after running the code

##################################################################################################
# TO INSTALL THIS DEMO SSE CLIENT for Python 3:
# pip install sseclient
# pip install PyJWT
# pip install requests
##################################################################################################
from sseclient import SSEClient
from datetime import datetime
from requests.auth import AuthBase
from pyspark.context import SparkContext
from pyspark.sql.functions import explode, to_date, col, current_date, lit
from delta.tables import *
import jwt # to install: pip install PyJWT
import requests # to install: pip install requests
import time
import boto3
import json
import gc
##################################################################################################
# Authorization
##################################################################################################
class Oauth2(AuthBase):
    def __init__(self, client_id, client_secret, oauth2_url):
        self.client_id = client_id
        self.client_secret = client_secret
        self.oauth2_url = oauth2_url
        self.token = None
        self.expires = None

    def __call__(self, r):
        """
        If we don't have an access token in hand, or it has expired, get a new token from the auth service.
        Then set the access token in the request's Authorization header.
        """
        now = time.time()
        if not self.token or now > self.expires:
            self.token = self.get_fresh_token()
            self.expires = now + self.token['expires_in']
        r.headers['Authorization'] = 'Bearer ' + self.token['access_token']
        return r

    def get_fresh_token(self):
        """
        Get an authorization header that contains a valid access token for the client
        """
        # The token used to sign the grant request itself
        jwt_token = jwt.encode({'iat': int(time.time())}, self.client_secret, algorithm='HS256')
        # Make the request to the auth service to get an access token for the client
        resp = requests.post(
            self.oauth2_url,
            data = {'grant_type': 'client_credentials', 'client_id': self.client_id, 'client_secret': jwt_token},
            verify = False,
            allow_redirects = False
        )
        json_resp = resp.json()
        if 'access_token' in json_resp:
            return json_resp
        elif 'error' in json_resp:
            raise Exception("OAuth failed: %s: %s" % (json_resp['error'], json_resp.get('error_description')))
        else:
            raise Exception("OAuth failed: %s" % (str(json_resp)))
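##################################################################################################
# Hypothetical usage sketch (my addition, not from the original post): because Oauth2
# is a requests AuthBase, it can authenticate any requests call, not only the
# SSEClient in main() below. The URL and credentials here are placeholders.
#
#   auth = Oauth2('my-client-id', 'my-client-secret', 'https://auth.example.com/token')
#   resp = requests.get('https://api.example.com/health', auth=auth)
#   print(resp.status_code)
##################################################################################################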
##################################################################################################
# Insert account_field_assignments
##################################################################################################
def insert_account_field_assignments(df):
    account_field_assignments_DF = None
    account_field_assignments_DF = df.select(
        col("account_field_object"),
        col("account_no").alias("account_fields_identifier"),
        explode("account_fields").alias("account_fields_explode"),
        col("created_tstamp"),
        col("event_date")
    ).select(
        col("account_field_object").cast("string"),
        col("account_fields_identifier").cast("bigint"),
        col("account_fields_explode").account_field.ids.field_no.alias("account_field_no").cast("bigint"),
        col("account_fields_explode").account_field.ids.field_name.alias("account_field_name").cast("string"),
        col("account_fields_explode").field_values.alias("account_field_value").cast("string"),
        col("created_tstamp").cast("string"),
        col("event_date").cast("string")
    )
    # Insert into the tss_account_field_assignments table
    account_field_assignments_DF.write.mode("append") \
        .format("delta").partitionBy("event_date").saveAsTable("test.tss_account_field_assignments")
    del df
    del account_field_assignments_DF
##################################################################################################
# Insert Account
##################################################################################################
def insert_account(data):
    print("Data" + data + "\n")
    df = spark.read.json(sc.parallelize([data])) \
        .withColumn("event_date", current_date().cast("string"))
    account_DF = None
    try:
        df = df.withColumn("status_no", col("status").ids.status_no)
    except:
        df = df.withColumn("status_no", lit(None))
    try:
        df = df.withColumn("currency_cd", col("currency").ids.currency_cd)
    except:
        df = df.withColumn("currency_cd", lit(None))
    #....some code here
    #....some code here
    account_DF = df.select(
        col("account_no").cast("bigint"),
        col("userid").cast("string"),
        col("client_account_id").cast("string"),
        col("balance").cast("float"),
        col("status_no").cast("integer"),
        col("status_tstamp").cast("string"),
        col("created_tstamp").cast("string"),
        col("updated_tstamp").cast("string"),
        col("is_test_account").cast("boolean"),
        col("currency_cd").cast("string"),
        col("updated_receipt_id").cast("string"),
        col("created_receipt_id").cast("string"),
        col("sequence_functional_group_id").cast("string"),
        col("start_date").cast("string"),
        col("is_anonymized").cast("boolean"),
        col("notify_method_id").cast("integer"),
        col("notification_template_group_id").cast("string"),
        col("event_date").cast("string")
    )
    # Insert into the tss_account table
    account_DF.write.mode("append").format("delta").partitionBy("event_date").saveAsTable("test.tss_account")
    testing = df.collect()[0]
    # Insert into tss_account_field_assignments
    try:
        if testing.__getitem__("account_fields"):
            insert_account_field_assignments(df)
    except:
        pass
    del df
    del account_DF
    del testing
    del data
    gc.collect()
###################################################################################################################
# Update Account
###################################################################################################################
def update_account(data, header):
    print("Data" + data + "\nHeader" + header + "\n")
    jsonData = json.loads(data)
    jsonCond = json.loads(header)
    # prepare condition to update the table test.tss_account
    cond = 'account_no=' + str(jsonCond["ref"]["ids"]["account_no"]) + ' and userid="' + str(jsonCond["ref"]["ids"]["userid"]) + '"'
    # prepare data to update the table test.tss_account
    updateJson = '{'
    try:
        if jsonData["client_account_id"] != None:
            updateJson = updateJson + '"client_account_id":"\'' + str(jsonData["client_account_id"]) + '\'",'
        else:
            updateJson = updateJson + '"client_account_id":"null",'
    except:
        pass
    try:
        if jsonData["balance"] != None:
            updateJson = updateJson + '"balance":"' + str(jsonData["balance"]) + '",'
        else:
            updateJson = updateJson + '"balance":"null",'
    except:
        pass
    #....some code here
    #....some code here
    updateJson = updateJson[:-1] + '}'
    # update table test.tss_account if not all elements are empty
    if updateJson != '}':
        try:
            deltaTable = DeltaTable.forName(spark, "test.tss_account")
            deltaTable.update(
                condition = cond,
                set = json.loads(updateJson)
            )
        except Exception as e:
            print(e.message, e.args)
    del updateJson
    del cond
    df = spark.read.json(sc.parallelize([data])) \
        .withColumn("event_date", current_date().cast("string"))
    # prepare data to update the table test.tss_account_field_assignments
    # (basically delete rows matching account_no from the table and insert fresh data back into it) => only when account_fields is present and non-empty
    try:
        if jsonData["account_fields"]:
            # delete rows of table test.tss_account_field_assignments
            deleteData = 'account_fields_identifier=' + str(jsonCond["ref"]["ids"]["account_no"])
            deltaTable = DeltaTable.forName(spark, "test.tss_account_field_assignments")
            deltaTable.delete(deleteData)
            deleteData = None
            # Insert fresh data into the table test.tss_account_field_assignments
            insert_account_field_assignments(df)
    except:
        pass
    del jsonCond
    del jsonData
    del df
    del data
    del header
    gc.collect()
    print("Account update complete")
##################################################################################################
# Stream event Handlers (Insert)
##################################################################################################
def insert_messages(data, event):
    # Get the message_type of the event
    line = data.split("\n", 2)
    message_type = (json.loads(line[0]))['ref']['type']
    # switch case of message_type
    if message_type == "Account":
        insert_account(line[1])
##################################################################################################
# Stream event Handlers (Update)
##################################################################################################
def update_messages(data, event):
    # Get the message_type of the event
    line = data.split("\n", 2)
    message_type = (json.loads(line[0]))['ref']['type']
    # switch case of message_type
    if message_type == "Account":
        update_account(line[1], line[0])
##################################################################################################
# Stream event Handlers (Handler Class)
##################################################################################################
class SseHandlers(object):
    def __init__(self):
        # Map each event type to a handler; delete_messages and
        # handle_heartbeat are defined elsewhere (not shown in this post)
        self.event_handlers = {
            "create"  : insert_messages,
            "update"  : update_messages,
            "delete"  : delete_messages,
            "load"    : insert_messages,
            "message" : handle_heartbeat
        }

    def handleMsg(self, msg):
        # Get the handler for the event type. Call that handler with the event's data
        self.event_handlers.get(msg.event)(msg.data, msg.event)
##################################################################################################
# A store for a stream's last event id.
##################################################################################################
class StreamStatusStore(object):
    def __init__(self):
        self.saved_last_id = None

    def save_last_id(self, last_id):
        self.saved_last_id = last_id

    def get_last_id(self):
        return self.saved_last_id
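##################################################################################################
# Sketch (my addition, not in the original post): the store above is in-memory only,
# so "restart at this point" in main() holds only while the process lives. A
# file-backed variant would survive a crash; the path below is a placeholder.
##################################################################################################
class FileStreamStatusStore(object):
    def __init__(self, path='/tmp/sse_last_event_id'):
        self.path = path

    def save_last_id(self, last_id):
        with open(self.path, 'w') as f:
            f.write(last_id or '')

    def get_last_id(self):
        try:
            with open(self.path) as f:
                return f.read() or None
        except IOError:
            return None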
##################################################################################################
# Main program
##################################################################################################
if __name__ == "__main__":
# Set all input parameter
sse_service_url = dbutils.widgets.get("sse_service_url")
client_id = dbutils.widgets.get("client_id")
oauth2_url = dbutils.widgets.get("oauth2_url")
client_secret = dbutils.widgets.get("client_secret")
# Create Status store, for fault tolarance
status_store = StreamStatusStore()
# Make an SSE message stream
messages = SSEClient(
sse_service_url, # URL of SSE service
auth = Oauth2(client_id, client_secret, oauth2_url), # authenticator
last_id = status_store.get_last_id() # if a last id was stored locally, start there
)
# Handle messages as they come in off the stream
handlers = SseHandlers()
for msg in messages:
# Handle each message as it comes in
handlers.handleMsg(msg)
# Call the status store with the latest event id. If we crash, we will restart at this point.
status_store.save_last_id(messages.last_id)
###############################################################################################
/*
Explanation
===========
=> We are using a Databricks notebook here
=> The SSEClient loops through each message, then gets stuck at a certain point and gives
   GC (Allocation Failure)
=> Streaming data gets connected to this SSEClient, and then we get the message
=> The message contains the headers and the data
=> We have included an example of a 'Create Event' below
header:{"ref":{"ids":{"account_no":XXXXX,"userid":"QA-XXXXX"},"type":"Account"},"tick":202546873272,"tstamp":"2022-04-13T02:51:46-05:00"}
data:{"account_no":XXXXX,"userid":"QA-XXXXX","client_account_id":null,"balance":0,"status":{"ids":{"status_no":1,"enum":1},"type":"Account_Status"},"status_tstamp":"2022-04-13T02:51:48-05:00",
"parent_account":null,"created_tstamp":"2022-04-13T02:51:48-05:00","updated_tstamp":"2022-04-13T02:51:48-05:00","secret_question":null,"secret_question_answer":null,"is_test_account":XXXX,
"is_invoice_approval_required":XXX,"currency":{"ids":{"currency_cd":"usd","code":"usd"},"type":"Currency"},"updated_receipt_id":null,"created_receipt_id":null,
"sequence_functional_group":{"ids":{"group_no":10035487,"client_group_id":"AAMFAG"},"type":"Functional_Account_Group"},"start_date":"XXXX-XX-XX",
"admin_contact":{"ids":{"contact_no":XXXXXXXX},"type":"Contact"},"is_consumer_account":false,"locale":{"ids":{"locale_no":XXXXX},"type":"XXXXX"},"legal_entity":null,"is_anonymized":null,
"notify_method":{"ids":{"method_no":2,"enum":2},"type":"Notify_Method"},"functional_account_groups":[{"ids":{"group_no":10035487,"client_group_id":"XXXXXX"},"type":"Functional_Account_Group"}],"collection_account_groups":[{"ids":{"group_no":XXXXXXXX,"client_group_id":"USChase-USD_Arlo"},"type":"Collection_Account_Group"}],
"account_fields":[{"account_field":{"ids":{"field_no":XXXX,"field_name":"Legal Entity"},"type":"Account_Field"},"field_values":["Arlo Technologies Inc"]},{"account_field":{"ids":{"field_no":XXXX,"field_name":"GeoIP Country"},"type":"Account_Field"},"field_values":["US"]},{"account_field":{"ids":{"field_no":2998,"field_name":"Calling Application"},"type":"Account_Field"},"field_values":["XXXX"]},{"account_field":{"ids":{"field_no":XXXX,"field_name":"Company Code"},"type":"Account_Field"},"field_values":["XXXX"]}],
"tax_exemption_level":null,"notification_overrides":[],"notification_template_group":{"ids":{"notification_template_group_no":XXXX,"client_notification_template_group_id":"XXXXXXXXX"},"type":"Notification_Template_Group"}
,"surcharges":[],"revrec_profile_number":null,"purchase_order_number":null,"cc_email_addresses":[],"bcc_email_addresses":[]}
event:create
##################################################################################################
Below is the response message after running the code
======================================================
2022-05-19T09:24:05.599+0000: [GC (Allocation Failure) [PSYoungGen: 6264108K->70234K(6417920K)] 6630117K->436251K(19642880K), 0.0477541 secs] [Times: user=0.16 sys=0.00, real=0.05 secs]
*/

Unable to import module - Lambda handler Error

When I try to run my Python code in Lambda, passing the handler as function.module, I get the error below. Any suggestions on how I could resolve this?
The file test_client_visitor below is triggered to call client_visitor and send an email to the clients accordingly. When I run the Python file test_client_visitor locally, the email is triggered successfully, but in Lambda I face this issue.
file_name: test_client_visitor
import unittest    # needed for unittest.TestCase / unittest.main()
import jsonpickle  # used to decode the event payload

function = __import__('client_visitor')
handler = function.scan_clients

class TestFunction(unittest.TestCase):
    def test_function(self):
        file = open('event.json', 'rb')
        try:
            ba = bytearray(file.read())
            event = jsonpickle.decode(ba)
            print('## EVENT')
            print(jsonpickle.encode(event))
            context = {'requestid': '1234'}
            result = handler(event, context)
            print(result)
            self.assertTrue(result, 'Emails could not be sent!')
        finally:
            file.close()

if __name__ == '__main__':
    unittest.main()
file_name: client_visitor.py
import datetime
import boto3
from aws_ses import send_bulk_templated_email
# boto3.set_stream_logger('botocore', level='DEBUG')
from mongodb import get_mongo_db
def process_clients(clients, developers, clients_to_be_notified, days):
    if not clients:
        pass
    check_date = datetime.datetime.now() + datetime.timedelta(days)
    for client in clients:
        client_id_ = client['client_id']
        if 'developer_id' in client:
            developers[client_id_] = client['developer_id']
        else:
            if 'secrets' in client:
                secrets = client['secrets']
                for secret in secrets:
                    if 'not_on_or_after' in secret and secret['not_on_or_after'] < check_date.timestamp():
                        clients_to_be_notified.append({'client_id': client_id_,
                                                       'expiration_date': datetime.datetime.fromtimestamp(
                                                           secret['not_on_or_after']).strftime('%m/%d/%Y')})
                        print("adding client to notify list", client_id_, ":", client['sort'])
def notify_clients(clients_to_be_notified, developers):
    developer_id_list = []
    for client_secret in clients_to_be_notified:
        developer_id_list.append(developers[client_secret['client_id']])
    if developer_id_list:
        db = get_mongo_db()
        if db:
            users = list(db.users.find({'guid': {'$in': developer_id_list}}, {'email', 'guid'}))
            need_to_send_email = False
            for user in users:
                for client_secret in clients_to_be_notified:
                    if developers[client_secret['client_id']] == user['guid']:
                        client_secret['email'] = user['email']
                        need_to_send_email = True
                        break
            if need_to_send_email:
                return send_bulk_templated_email(clients_to_be_notified)
        else:
            return False
    return True
def scan_clients(event, context):
    local = False
    if 'local' in event:
        local = event['local'] == 'True'
    if local:
        dynamodb = boto3.resource('dynamodb', endpoint_url="http://localhost:8000")
    else:
        dynamodb = boto3.resource('dynamodb')
    days = 30
    if 'days' in event:
        days = int(event['days'])
    print(f"Scanning Clients with {days} or less to secret expiration")
    table = dynamodb.Table('****')
    scan_kwargs = {
        'ProjectionExpression': 'client_id, sort, developer_id, secrets, approved'
    }
    test = False
    if 'test' in event:
        test = event['test'] == 'True'
    done = False
    start_key = None
    developers = {}
    clients_to_be_notified = []
    if test:
        developers['idm-portal1'] = '***'
        clients_to_be_notified = [{'client_id': 'idm-portal1', 'expiration_date': '04/17/2021'}]
    while not done:
        if start_key:
            scan_kwargs['ExclusiveStartKey'] = start_key
        response = table.scan(**scan_kwargs)
        process_clients(response.get('Items', []), developers, clients_to_be_notified, days)
        start_key = response.get('LastEvaluatedKey', None)
        done = start_key is None
    print("total developers ", len(developers), " total clients_to_be_notified ", len(clients_to_be_notified))
    return notify_clients(clients_to_be_notified, developers)

if __name__ == '__main__':
    scan_clients(event={'days': 30, 'local': False, 'test': True}, context=None)
Response
{
  "errorMessage": "Unable to import module 'test_client_visitor': No module named 'test_client_visitor'",
  "errorType": "Runtime.ImportModuleError",
  "stackTrace": []
}
Your file must be named test_client_visitor.py. The way Lambda runs your code is by trying to import the main file and call the handler function. See the AWS docs on setting up a handler for Python.
The reason you didn't run into this issue locally is, I assume, that you are calling Python directly on the command line — python test_client_visitor. When Python imports a module, the file has to end in the .py extension.
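For reference, a minimal sketch of what Lambda expects, assuming the function's handler setting is test_client_visitor.handler (module name before the dot, function name after; the names here are illustrative):
# test_client_visitor.py -- the file name must match the module part of the handler setting
def handler(event, context):
    # Lambda imports this module by name, then calls this function
    return {'statusCode': 200}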
I was able to fix this issue with the right packaging of the contents into the zip, avoiding the creation of an extra folder, with the command below.
Command:
cd folder; zip -r ../filename.zip *
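To see why the extra folder breaks the import: Lambda unpacks the zip and imports modules from the archive root. Zipping the folder itself (zip -r filename.zip folder) places the code at folder/test_client_visitor.py, one level below the root, so the import fails; zipping the folder's contents as above places test_client_visitor.py at the root, where the import succeeds.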
Thank you, everyone, for your inputs.

iot edge direct method handler in python

I have created a module for a BACnet scan, and it responds with a list of devices and their addresses as a result. But I'm having trouble implementing a direct method handler in Python. When I first tried implementing it myself, I got an error, which could mean I didn't successfully register the direct method callback. I have some references, but they were for C#, and the Azure docs are not helping me figure out the right method to register the callback. For IoTHubModuleClient there are both on_method_request_received and receive_method_request. I appreciate any help!
def iothub_client_scan_run():
    try:
        iot_client = iothub_client_init()
        bacnet_scan_listener_thread = threading.Thread(target=device_method_listener, args=(iot_client,))
        bacnet_scan_listener_thread.daemon = True
        bacnet_scan_listener_thread.start()
        while True:
            time.sleep(1000)
    except KeyboardInterrupt:
        # exit the scan loop on Ctrl-C (the post's try block had no except clause)
        pass

def device_method_listener(iot_client):
    while True:
        # Receive the direct method request
        method_request = iot_client.receive_method_request()
        print(
            "\nMethod callback called with:\nmethodName = {method_name}\npayload = {payload}".format(
                method_name=method_request.name,
                payload=method_request.payload
            )
        )
        if method_request.name == "runBacnetScan":
            response = bacnet_scan_device(method_request)
        else:
            response_payload = {"Response": "Direct method {} not defined".format(method_request.name)}
            response_status = 404
        # Send a method response indicating the method request was resolved
        print('Sending method response')
        iot_client.send_method_response(response)
        print('Message sent!')
Edit:
Here is my route config
I was able to resolve my issue, or at least find the root cause: it was my network configuration under createOptions. It seems there's an issue when I try to use NetworkMode: host and connect via IotModuleClient.connect_from_edge_environment with a connection string. I'm still trying to tweak the connection configuration, but at least I know it's not the code.
import asyncio
import time
from azure.iot.device import MethodResponse

async def method_request_handler(module_client):
    while True:
        method_request = await module_client.receive_method_request()
        print(
            "\nMethod callback called with:\nmethodName = {method_name}\npayload = {payload}".format(
                method_name=method_request.name,
                payload=method_request.payload
            )
        )
        if method_request.name == "method1":
            payload = {"result": True, "data": "some data"}  # set response payload
            status = 200  # set return status code
            print("executed method1")
        elif method_request.name == "method2":
            payload = {"result": True, "data": 1234}  # set response payload
            status = 200  # set return status code
            print("executed method2")
        else:
            payload = {"result": False, "data": "unknown method"}  # set response payload
            status = 400  # set return status code
            print("executed unknown method: " + method_request.name)
        # Send the response
        method_response = MethodResponse.create_from_method_request(method_request, status, payload)
        await module_client.send_method_response(method_response)
        print('Message sent!')

def stdin_listener():
    while True:
        try:
            selection = input("Press Q to quit\n")
            if selection == "Q" or selection == "q":
                print("Quitting...")
                break
        except:
            time.sleep(10)

# Schedule tasks for the C2D listeners (input1_listener and twin_patch_listener are defined elsewhere)
listeners = asyncio.gather(input1_listener(module_client), twin_patch_listener(module_client), method_request_handler(module_client))
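For completeness, a minimal sketch of the surrounding boilerplate (my assumption, based on the azure-iot-device async API; input1_listener and twin_patch_listener are the other handlers referenced above, not shown here) showing how the handler and stdin listener wire together:
import asyncio
from azure.iot.device.aio import IoTHubModuleClient

async def main():
    module_client = IoTHubModuleClient.create_from_edge_environment()
    await module_client.connect()
    # schedule the method handler (and any other listeners) on the event loop
    listeners = asyncio.gather(method_request_handler(module_client))
    # run the blocking stdin listener in an executor thread
    loop = asyncio.get_event_loop()
    user_finished = loop.run_in_executor(None, stdin_listener)
    await user_finished
    listeners.cancel()
    await module_client.disconnect()

if __name__ == "__main__":
    asyncio.get_event_loop().run_until_complete(main())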

How to download files from Google Vault export immediately after creating it with Python API?

Using the Python API, I have created an export. How do I download the .zip file in the export using the same authorized service? When creating the export, I can see the bucketName and objectNames of the cloudStorageSink; however, I cannot find any documentation on how to download them to my host using the existing service that created the export.
#!/usr/bin/env python
from __future__ import print_function
import datetime
import json
import time
from googleapiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools

# If modifying these scopes, delete the file token.json.
SCOPES = 'https://www.googleapis.com/auth/ediscovery'

def list_exports(service, matter_id):
    return service.matters().exports().list(matterId=matter_id).execute()

def get_export_by_id(service, matter_id, export_id):
    return service.matters().exports().get(matterId=matter_id, exportId=export_id).execute()

def get_service():
    '''
    Look for an active credential token; if one does not exist, use credentials.json
    and ask the user for permission to access. Store the new token, return the service object.
    '''
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('vault', 'v1', http=creds.authorize(Http()))
    return service

def create_drive_export(service, matter_id, export_name, num_days):
    """
    Once we have a matter_id, we can create an export under it with the relevant files we are looking for.
    """
    # set times for beginning and end of query:
    today = datetime.datetime.now()
    print("creating a drive export at {}".format(today))
    start_time = today - datetime.timedelta(days=num_days)
    drive_query_options = {'includeTeamDrives': True}
    user_list = ['me@gmail.com']
    drive_query = {
        'corpus': 'DRIVE',
        'dataScope': 'ALL_DATA',
        'searchMethod': 'ACCOUNT',
        'accountInfo': {
            'emails': user_list
        },
        'driveOptions': drive_query_options,
        # end time is the more recent date, start time is the older date
        'endTime': '{}-{}-{}T00:00:00Z'.format(today.year, today.month, today.day),
        'startTime': '{}-{}-{}T00:00:00Z'.format(start_time.year, start_time.month, start_time.day),
        'timeZone': 'Etc/GMT'
    }
    wanted_export = {
        'name': export_name,
        'query': drive_query,
        'exportOptions': {
            'driveOptions': {}
        }
    }
    return service.matters().exports().create(matterId=matter_id, body=wanted_export).execute()

def get_export(service, matter_id, export_id):
    return service.matters().exports().get(matterId=matter_id, exportId=export_id).execute()

def main():
    service = get_service()
    matter_id = '<known_matter_id>'
    timestamp = datetime.datetime.now().strftime("%Y%m%d.%H%M%s")
    export = create_drive_export(service, matter_id, "code_gen_export.{}".format(timestamp), 1)
    # check every 5 seconds until the export is done being created:
    while export['status'] == 'IN_PROGRESS':
        export = get_export(service, matter_id, export['id'])
        print('...')
        time.sleep(5)
    # print(json.dumps(export, indent=2))
    print(json.dumps(export['cloudStorageSink']['files'], indent=2))

if __name__ == '__main__':
    main()
and running the above code produces:
creating a drive export at 2018-09-20 17:12:38.026402
...
...
...
...
...
...
[
  {
    "md5Hash": "hash_value",
    "bucketName": "bucket_string",
    "objectName": "object1_string/code_gen_export.20180920.17121537481558-custodian-docid.csv",
    "size": "1684"
  },
  {
    "md5Hash": "hash_value",
    "bucketName": "bucket_string",
    "objectName": "object2_string/code_gen_export.20180920.17121537481558-metadata.xml",
    "size": "10600"
  },
  {
    "md5Hash": "hash_value",
    "bucketName": "bucket_string",
    "objectName": "object3_string/code_gen_export.20180920.17121537481558_0.zip",
    "size": "21599222"
  }
]
Can I download the .zip file using the service object I created in get_service()?
After a long struggle with the above, I found the right approach with the aid of one of Google's API support agents.
Notice that you will need to create a new service using:
build('storage', 'v1', credentials=credentials)
where credentials is:
service_account.Credentials.from_service_account_file(
    SERVICE_ACCOUNT_FILE,
    scopes=SCOPES,
    subject='user@domain.com'
)
(It may be that the same argument you used for your credentials, http=creds.authorize(Http()), will work as well; I did not try that.)
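That untested variant would look like the following sketch (reusing creds from get_service() above; note that SCOPES would also need a Cloud Storage scope, not just the ediscovery one):
storage_service = build('storage', 'v1', http=creds.authorize(Http()))
req = storage_service.objects().get_media(bucket=bucket_name, object=object_name)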
In addition, you will need a byte-stream library such as io, and to import googleapiclient.http as well.
The full code:
import io
from google.oauth2 import service_account
from googleapiclient.discovery import build
import googleapiclient.http
SCOPES = ['https://www.googleapis.com/auth/devstorage.full_control']
SERVICE_ACCOUNT_FILE = 'yourServiceAccountFile.json'
bucket_name = 'yourBucketName'
object_name = 'yourObjectName.zip'
credentials = service_account.Credentials.from_service_account_file(
SERVICE_ACCOUNT_FILE,
scopes=SCOPES,
subject='user#domain.com'
)
service = build('storage', 'v1', credentials=credentials)
req = service.objects().get_media(bucket=bucket_name, object=object_name)
out_file = io.BytesIO()
downloader = googleapiclient.http.MediaIoBaseDownload(out_file, req)
done = False
while done is False:
status, done = downloader.next_chunk()
print("Download {}%.".format(int(status.progress() * 100)))
file_name = '/Users/myUser/Downloads/new_file.zip'
open(file_name, "w").write(out_file.getvalue())
The above answer is great but causes issues with large files, as BytesIO holds the data in memory. In a low-RAM environment, a 2 GB download can kill your process. I suggest using FileIO instead.
Change the following bit of code:
out_file = io.BytesIO()
downloader = googleapiclient.http.MediaIoBaseDownload(out_file, req)
done = False
while done is False:
    status, done = downloader.next_chunk()
    print("Download {}%.".format(int(status.progress() * 100)))

file_name = '/Users/myUser/Downloads/new_file.zip'
open(file_name, "wb").write(out_file.getvalue())
To:
file_name = '/myfilepath/myfilename.ext'
with io.FileIO(file_name, mode='wb') as out_file:
    downloader = googleapiclient.http.MediaIoBaseDownload(out_file, req)
    done = False
    while not done:
        status, done = downloader.next_chunk()

Take Dialogflow intent and query Firestore

My chatbot has been created in Dialogflow, and I am now trying to access it from Python, to take user inputs and display outputs in the GUI (think a basic chatbot GUI).
I have connected my Python environment to Dialogflow and to Firestore.
Here is the code that detects intents:
# Detection of Dialogflow intents, project and input.
def detect_intent_texts(project_id, session_id, texts, language_code):
    # Returns the result of detect intent with texts as inputs; later we can keep the same
    # `session_id` between requests, which allows continuation of the conversation.
    import dialogflow_v2 as dialogflow
    session_client = dialogflow.SessionsClient()
    session = session_client.session_path(project_id, session_id)
    # To ensure the session path is correct: print('Session path: {}\n'.format(session))
    for text in texts:
        text_input = dialogflow.types.TextInput(text=text, language_code=language_code)
        query_input = dialogflow.types.QueryInput(text=text_input)
        response = session_client.detect_intent(session=session, query_input=query_input)
        print('Chatbot: {}\n'.format(response.query_result.fulfillment_text))

detect_intent_texts("chat-8", "abc", ["Hey"], "en-us")
I need to somehow say: if THIS intent is triggered, get something from the db and display it to the user.
UPDATE
My current code is below in full. Everything looks right to me, but it's throwing an error I don't understand. Thanks to Sid8491 for the help so far.
In short, my issue is: my previous code let me type something and the chatbot responded. It was all in the console, but it worked... The new code is supposed to let me say "when THIS intent is triggered, do THIS".
import os, json
import sys
import dialogflow
from dialogflow_v2beta1 import *
import firebase_admin
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
from firebase_admin import firestore
from firebase_admin import credentials
import requests.packages.urllib3
from Tkinter import *
from dialogflow_v2beta1 import agents_client
import Tkinter as tk

result = None
window = Tk()

def Response():
    # no need to use global here
    result = myText.get()
    displayText.configure(state='normal')
    displayText.insert(END, "User:" + result + '\n')
    displayText.configure(state='disabled')

# Creating the GUI
myText = tk.StringVar()
window.resizable(False, False)
window.title("Chatbot")
window.geometry('400x400')
User_Input = tk.Entry(window, textvariable=myText, width=50).place(x=20, y=350)
subButton = tk.Button(window, text="Send", command=Response).place(x=350, y=350)
displayText = Text(window, height=20, width=40)
displayText.pack()
scroll = Scrollbar(window, command=displayText).pack(side=RIGHT)
window.mainloop()

# Initialize the firebase admin SDK
cred = credentials.Certificate('./file.json')
default_app = firebase_admin.initialize_app(cred)
db = firestore.client()

def getCourse():
    doc_ref = db.collection(u"Course_Information").document(u"CourseTypes")
    try:
        doc = doc_ref.get()
        return 'Document data: {}'.format(doc.to_dict())
    except google.cloud.exceptions.NotFound:
        return 'Not found'

def detect_intent_text(project_id, session_id, text, language_code):
    GOOGLE_APPLICATION_CREDENTIALS = ".chat-8.json"
    session_client = dialogflow.SessionsClient(GOOGLE_APPLICATION_CREDENTIALS)
    session = session_client.session_path(project_id, session_id)
    text_input = dialogflow.types.TextInput(
        text=text, language_code=language_code)
    query_input = dialogflow.types.QueryInput(text=text_input)
    response = session_client.detect_intent(
        session=session, query_input=query_input)

queryText = [myText.get()]
res = detect_intent_text('chat-8', 'session-test', queryText, 'en')
intentName = res['query_result']['intent']['display_name']
if intentName == 'CourseEnquiry':
    reponse = getCourse()
    print json.dumps({
        'fulfillmentText': reponse,
    })
elif intentName == 'Greetings':
    print "Yo"

detect_intent_texts("chat-8", "abc", queryText, "en-us")
But I get this error:
C:\Users\chat\PycharmProjects\Chatbot\venv\Scripts\python.exe C:/Users/chat/PycharmProjects/Chatbot/venv/Chatbot.py
Traceback (most recent call last):
  File "C:/Users/chat/PycharmProjects/Chatbot/venv/Chatbot.py", line 65, in <module>
    res = detect_intent_text('chat-8', 'session-test', queryText, 'en')
  File "C:/Users/chat/PycharmProjects/Chatbot/venv/Chatbot.py", line 51, in detect_intent_text
    session_client = dialogflow.SessionsClient(GOOGLE_APPLICATION_CREDENTIALS)
  File "C:\Users\chat\PycharmProjects\Chatbot\venv\lib\site-packages\dialogflow_v2\gapic\sessions_client.py", line 109, in __init__
    self.sessions_stub = (session_pb2.SessionsStub(channel))
  File "C:\Users\chat\PycharmProjects\Chatbot\venv\lib\site-packages\dialogflow_v2\proto\session_pb2.py", line 1248, in __init__
    self.DetectIntent = channel.unary_unary(
AttributeError: 'str' object has no attribute 'unary_unary'

Process finished with exit code 1
Yes, I think you are on the right track.
You need to extract intentName or actionName from the response you got from Dialogflow, call your corresponding functions, and then send the response back to the user.
res = detect_intent_texts("chat-8", "abc", ["Hey"], "en-us")
action = res['queryResult']['action']
if action == 'getSomethingFromDb':
    reponse = someFunction(req)
    return json.dumps({
        'fulfillmentText': reponse,
    })
elif action == 'somethingElse':
    ....
If you want to do it using intentName instead of actionName, you can extract intentName like below:
intentName = res['query_result']['intent']['display_name']
EDIT 1:
Example -
import dialogflow
import os, json

def getCourse():
    doc_ref = db.collection(u"Course_Information").document(u"CourseTypes")
    try:
        doc = doc_ref.get()
        return 'Document data: {}'.format(doc.to_dict())
    except google.cloud.exceptions.NotFound:
        return 'Not found'

def detect_intent_text(project_id, session_id, text, language_code):
    GOOGLE_APPLICATION_CREDENTIALS = "C:\\pyth_to_...\\cred.json"
    session_client = dialogflow.SessionsClient(GOOGLE_APPLICATION_CREDENTIALS)
    session = session_client.session_path(project_id, session_id)
    text_input = dialogflow.types.TextInput(
        text=text, language_code=language_code)
    query_input = dialogflow.types.QueryInput(text=text_input)
    response = session_client.detect_intent(
        session=session, query_input=query_input)
    return response  # hand the response back so the caller can inspect the intent

queryText = 'get courses of Python'  # you will call some function to get text from your app
res = detect_intent_text('project_1234', 'session-test', queryText, 'en')
intentName = res['query_result']['intent']['display_name']
if intentName == 'getCourse':
    reponse = getCourse()
    print(json.dumps({
        'fulfillmentText': reponse,
    }))
Try the above example and change it according to the needs of your app. My suggestion is to first get Dialogflow working without the app, then integrate it with the app. Otherwise you won't be able to tell whether the problem is in Dialogflow or in your app.
Hope it helps.
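A closing note on the traceback above (my reading, not part of the original answer): SessionsClient does not accept a credentials file path as a positional argument, so the string lands where a gRPC channel is expected, which is exactly what AttributeError: 'str' object has no attribute 'unary_unary' is complaining about. Pointing the standard environment variable at the key file before constructing the client avoids it, as in this sketch:
import os
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '.chat-8.json'  # key file path taken from the post
session_client = dialogflow.SessionsClient()  # credentials are picked up from the environment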
