##################################################################################################
# TO INSTALL THIS DEMO SSE CLIENT for Python 3:
# pip install sseclient
# pip install PyJWT
# pip install requests
##################################################################################################
from sseclient import SSEClient
from datetime import datetime
from requests.auth import AuthBase
from pyspark.context import SparkContext
from pyspark.sql.functions import explode, to_date, col, current_date, lit
from delta.tables import *
import jwt # to install: pip install PyJWT
import requests # to install: pip install requests
import time
import boto3
import json
import gc
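# Note: spark, sc, and dbutils used below are provided implicitly by the Databricks
# notebook runtime (see the explanation at the end of this post); they are not imported here.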
##################################################################################################
# Authorization
##################################################################################################
class Oauth2(AuthBase):
def __init__(self, client_id, client_secret, oauth2_url):
self.client_id = client_id
self.client_secret = client_secret
self.oauth2_url = oauth2_url
self.token = None
self.expires = None
def __call__(self, r):
"""
If we don't have an access token in hand, or it has expired, get a new token from the auth service.
Then set the access token in the request's Authorization header.
"""
now = time.time()
if not self.token or now > self.expires:
self.token = self.get_fresh_token()
self.expires = now + self.token['expires_in']
r.headers['Authorization'] = 'Bearer ' + self.token['access_token']
return r
def get_fresh_token(self):
"""
Get an authorization header that contains a valid access token for the client
"""
# The token used to sign the grant request itself
jwt_token = jwt.encode({'iat': int(time.time())}, self.client_secret, algorithm='HS256')
# Make the request to the auth service to get an access token for the client
resp = requests.post(
self.oauth2_url,
data = {'grant_type': 'client_credentials', 'client_id': self.client_id, 'client_secret': jwt_token},
verify = False, # NOTE: disables TLS certificate verification; acceptable only in a demo
allow_redirects = False
)
json_resp = resp.json()
if 'access_token' in json_resp:
return json_resp
elif 'error' in json_resp:
raise Exception("OAuth failed: %s: %s" % (json_resp['error'], json_resp.get('error_description')))
else:
raise Exception("OAuth failed: %s" % (str(json_resp)))
##################################################################################################
# Insert account_field_assignments
##################################################################################################
def insert_account_field_assignments(df):
account_field_assignments_DF = None
account_field_assignments_DF = df.select(
col("account_field_object"),
col("account_no").alias("account_fields_identifier"),
explode("account_fields").alias("account_fields_explode"),
col("created_tstamp"),
col("event_date")
).select(
col("account_field_object").cast("string"),
col("account_fields_identifier").cast("bigint"),
col("account_fields_explode").account_field.ids.field_no.alias("account_field_no").cast("bigint"),
col("account_fields_explode").account_field.ids.field_name.alias("account_field_name").cast("string"),
col("account_fields_explode").field_values.alias("account_field_value").cast("string"),
col("created_tstamp").cast("string"),
col("event_date").cast("string")
)
# Insert into the test.tss_account_field_assignments table
account_field_assignments_DF.write.mode("append") \
.format("delta").partitionBy("event_date").saveAsTable("test.tss_account_field_assignments")
del df
del account_field_assignments_DF
##################################################################################################
# Insert Account
##################################################################################################
def insert_account(data):
print("Data" + data + "\n")
df = spark.read.json(sc.parallelize([data])) \
.withColumn("event_date", current_date().cast("string"))
account_DF = None
try:
df = df.withColumn("status_no", col("status").ids.status_no)
except:
df = df.withColumn("status_no", lit(None))
try:
df = df.withColumn("currency_cd", col("currency").ids.currency_cd)
except:
df = df.withColumn("currency_cd", lit(None))
#....some code here
#....some code here
account_DF = df.select(
col("account_no").cast("bigint"),
col("userid").cast("string"),
col("client_account_id").cast("string"),
col("balance").cast("float"),
col("status_no").cast("integer"),
col("status_tstamp").cast("string"),
col("created_tstamp").cast("string"),
col("updated_tstamp").cast("string"),
col("is_test_account").cast("boolean"),
col("currency_cd").cast("string"),
col("updated_receipt_id").cast("string"),
col("created_receipt_id").cast("string"),
col("sequence_functional_group_id").cast("string"),
col("start_date").cast("string"),
col("is_anonymized").cast("boolean"),
col("notify_method_id").cast("integer"),
col("notification_template_group_id").cast("string"),
col("event_date").cast("string")
)
# Insert into the test.tss_account table
account_DF.write.mode("append").format("delta").partitionBy("event_date").saveAsTable("test.tss_account")
testing = df.collect()[0] # collect() materializes every row on the driver; only the first row is used here
# Insert into tss_account_field_assignments
try:
if testing.__getitem__("account_fields"):
insert_account_field_assignments(df)
except:
pass
del df
del account_DF
del testing
del data
gc.collect()
###################################################################################################################
# Update Account
###################################################################################################################
def update_account(data, header):
print("Data" + data + "\nHeader" + header + "\n")
jsonData = json.loads(data)
jsonCond = json.loads(header)
# prepare condition to update the table test.tss_account
cond = 'account_no=' + str(jsonCond["ref"]["ids"]["account_no"]) + ' and userid="' + str(jsonCond["ref"]["ids"]["userid"]) + '"'
# prepare data to update the table test.tss_account
updateJson = '{'
try:
if jsonData["client_account_id"] != None:
updateJson = updateJson + '"client_account_id":"\'' + str(jsonData["client_account_id"]) + '\'",'
else:
updateJson = updateJson + '"client_account_id":"null",'
except:
pass
try:
if jsonData["balance"] != None:
updateJson = updateJson + '"balance":"' + str(jsonData["balance"]) + '",'
else:
updateJson = updateJson + '"balance":"null",'
except:
pass
#....some code here
#....some code here
updateJson = updateJson[:-1] + '}'
# update table test.tss_account if not all elements are empty
if updateJson != '}':
try:
deltaTable = DeltaTable.forName(spark, "test.tss_account")
deltaTable.update(
condition = cond,
set = json.loads(updateJson)
)
except Exception as e:
print(e, e.args) # Exception objects have no .message attribute in Python 3
del updateJson
del cond
df = spark.read.json(sc.parallelize([data])) \
.withColumn("event_date", current_date().cast("string"))
# prepare data to update the table test.tss_account_field_assignments
# (Basically delete rows matching to account_no from table and insert fresh data back into the table) => only when account_fields is present and non-empty
try:
if jsonData["account_fields"]:
# delete row of table test.tss_account_field_assignments
deleteData = 'account_fields_identifier=' + str(jsonCond["ref"]["ids"]["account_no"])
deltaTable = DeltaTable.forName(spark, "test.tss_account_field_assignments")
deltaTable.delete(deleteData)
deleteData = None
# Insert fresh data into the table test.tss_account_field_assignments
insert_account_field_assignments(df)
except:
pass
del jsonCond
del jsonData
del df
del data
del header
gc.collect()
print("Account update complete")
##################################################################################################
# Stream event Handlers (Insert)
##################################################################################################
def insert_messages(data, event):
# Get the message_type of the event
line = data.split("\n", 2)
message_type = (json.loads(line[0]))['ref']['type']
# switch case of message_type
if message_type == "Account":
insert_account(line[1])
##################################################################################################
# Stream event Handlers (Update)
##################################################################################################
def update_messages(data, event):
# Get the message_type of the event
line = data.split("\n", 2)
message_type = (json.loads(line[0]))['ref']['type']
# switch case of message_type
if message_type == "Account":
update_account(line[1], line[0])
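# Note: delete_messages and handle_heartbeat are referenced by SseHandlers below but
# their definitions are elided from this snippet; minimal stubs (assumptions) so the
# mapping resolves:
def delete_messages(data, event):
    pass  # delete handling elided in the original snippet

def handle_heartbeat(data, event):
    pass  # heartbeat events carry no payload that needs processing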
##################################################################################################
# Stream event Handlers (Handler Class)
##################################################################################################
class SseHandlers(object):
def __init__(self):
# Map event type to handler
self.event_handlers = {
"create" : insert_messages,
"update" : update_messages,
"delete" : delete_messages,
"load" : insert_messages,
"message" : handle_heartbeat
}
def handleMsg(self, msg):
# Get the handler for the event type. Call that handler with the event's data
self.event_handlers.get(msg.event)(msg.data, msg.event)
##################################################################################################
# A store for a stream's last event id.
##################################################################################################
class StreamStatusStore(object):
def __init__(self):
self.saved_last_id = None
def save_last_id(self, last_id):
self.saved_last_id = last_id
def get_last_id(self):
return self.saved_last_id
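# A file-backed variant (a sketch, not in the original; the path is an assumption).
# The in-memory store above is lost when the process dies, so the "restart at this
# point" comment in the main loop only holds across reconnects within a single run.
class FileStreamStatusStore(object):
    def __init__(self, path="/tmp/sse_last_event_id"):
        self.path = path
    def save_last_id(self, last_id):
        with open(self.path, "w") as f:
            f.write(last_id)
    def get_last_id(self):
        try:
            with open(self.path) as f:
                return f.read() or None
        except IOError:
            return None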
##################################################################################################
# Main program
##################################################################################################
if __name__ == "__main__":
# Set all input parameters
sse_service_url = dbutils.widgets.get("sse_service_url")
client_id = dbutils.widgets.get("client_id")
oauth2_url = dbutils.widgets.get("oauth2_url")
client_secret = dbutils.widgets.get("client_secret")
# Create status store, for fault tolerance
status_store = StreamStatusStore()
# Make an SSE message stream
messages = SSEClient(
sse_service_url, # URL of SSE service
auth = Oauth2(client_id, client_secret, oauth2_url), # authenticator
last_id = status_store.get_last_id() # if a last id was stored locally, start there
)
# Handle messages as they come in off the stream
handlers = SseHandlers()
for msg in messages:
# Handle each message as it comes in
handlers.handleMsg(msg)
# Call the status store with the latest event id. If we crash, we will restart at this point.
status_store.save_last_id(messages.last_id)
###############################################################################################
Explanation
===========
=> We are using a Databricks notebook here
=> The SSEClient loops through each message and then gets stuck at a certain point and gives
GC (Allocation Failure)
=> Streaming data gets connected to this SSEClient and then we receive the message
=> Each message contains the headers and the data
=> An example of a 'create' event is given below:
header:{"ref":{"ids":{"account_no":XXXXX,"userid":"QA-XXXXX"},"type":"Account"},"tick":202546873272,"tstamp":"2022-04-13T02:51:46-05:00"}
data:{"account_no":XXXXX,"userid":"QA-XXXXX","client_account_id":null,"balance":0,"status":{"ids":{"status_no":1,"enum":1},"type":"Account_Status"},"status_tstamp":"2022-04-13T02:51:48-05:00",
"parent_account":null,"created_tstamp":"2022-04-13T02:51:48-05:00","updated_tstamp":"2022-04-13T02:51:48-05:00","secret_question":null,"secret_question_answer":null,"is_test_account":XXXX,
"is_invoice_approval_required":XXX,"currency":{"ids":{"currency_cd":"usd","code":"usd"},"type":"Currency"},"updated_receipt_id":null,"created_receipt_id":null,
"sequence_functional_group":{"ids":{"group_no":10035487,"client_group_id":"AAMFAG"},"type":"Functional_Account_Group"},"start_date":"XXXX-XX-XX",
"admin_contact":{"ids":{"contact_no":XXXXXXXX},"type":"Contact"},"is_consumer_account":false,"locale":{"ids":{"locale_no":XXXXX},"type":"XXXXX"},"legal_entity":null,"is_anonymized":null,
"notify_method":{"ids":{"method_no":2,"enum":2},"type":"Notify_Method"},"functional_account_groups":[{"ids":{"group_no":10035487,"client_group_id":"XXXXXX"},"type":"Functional_Account_Group"}],"collection_account_groups":[{"ids":{"group_no":XXXXXXXX,"client_group_id":"USChase-USD_Arlo"},"type":"Collection_Account_Group"}],
"account_fields":[{"account_field":{"ids":{"field_no":XXXX,"field_name":"Legal Entity"},"type":"Account_Field"},"field_values":["Arlo Technologies Inc"]},{"account_field":{"ids":{"field_no":XXXX,"field_name":"GeoIP Country"},"type":"Account_Field"},"field_values":["US"]},{"account_field":{"ids":{"field_no":2998,"field_name":"Calling Application"},"type":"Account_Field"},"field_values":["XXXX"]},{"account_field":{"ids":{"field_no":XXXX,"field_name":"Company Code"},"type":"Account_Field"},"field_values":["XXXX"]}],
"tax_exemption_level":null,"notification_overrides":[],"notification_template_group":{"ids":{"notification_template_group_no":XXXX,"client_notification_template_group_id":"XXXXXXXXX"},"type":"Notification_Template_Group"}
,"surcharges":[],"revrec_profile_number":null,"purchase_order_number":null,"cc_email_addresses":[],"bcc_email_addresses":[]}
event:create
##################################################################################################
Below is the log output after running the code
======================================================
2022-05-19T09:24:05.599+0000: [GC (Allocation Failure) [PSYoungGen: 6264108K->70234K(6417920K)] 6630117K->436251K(19642880K), 0.0477541 secs] [Times: user=0.16 sys=0.00, real=0.05 secs]
Related
When I try to run my Python code in Lambda, passing the handler as function.module, I get the error below. Any suggestions on how I could resolve this?
The file test_client_visitor below is triggered to call client_visitor and send an email to the clients accordingly. When I run the Python file test_client_visitor locally, the email is triggered successfully, but in Lambda I face the issue.
file_name: test_client_visitor
import unittest
import jsonpickle

function = __import__('client_visitor')
handler = function.scan_clients
class TestFunction(unittest.TestCase):
def test_function(self):
file = open('event.json', 'rb')
try:
ba = bytearray(file.read())
event = jsonpickle.decode(ba)
print('## EVENT')
print(jsonpickle.encode(event))
context = {'requestid': '1234'}
result = handler(event, context)
print(result)
self.assertTrue(result, 'Emails could not be sent!')
finally:
file.close()
if __name__ == '__main__':
unittest.main()
file_name: client_visitor.py
import datetime
import boto3
from aws_ses import send_bulk_templated_email
# boto3.set_stream_logger('botocore', level='DEBUG')
from mongodb import get_mongo_db
def process_clients(clients, developers, clients_to_be_notified, days):
if not clients:
return  # nothing to process (the original 'pass' had no effect)
check_date = datetime.datetime.now() + datetime.timedelta(days)
for client in clients:
client_id_ = client['client_id']
if 'developer_id' in client:
developers[client_id_] = client['developer_id']
else:
if 'secrets' in client:
secrets = client['secrets']
for secret in secrets:
if 'not_on_or_after' in secret and secret['not_on_or_after'] < check_date.timestamp():
clients_to_be_notified.append({'client_id': client_id_,
'expiration_date': datetime.datetime.fromtimestamp(
secret['not_on_or_after']).strftime('%m/%d/%Y')})
print("adding client to notify List", client_id_, ":", client['sort'])
def notify_clients(clients_to_be_notified, developers):
developer_id_list = []
for client_secret in clients_to_be_notified:
developer_id_list.append(developers[client_secret['client_id']])
if developer_id_list:
db = get_mongo_db()
if db:
users = list(db.users.find({'guid': {'$in': developer_id_list}}, {'email', 'guid'}))
need_to_send_email = False
for user in users:
for client_secret in clients_to_be_notified:
if developers[client_secret['client_id']] == user['guid']:
client_secret['email'] = user['email']
need_to_send_email = True
break
if need_to_send_email:
return send_bulk_templated_email(clients_to_be_notified)
else:
return False
return True
def scan_clients(event, context):
local = False
if 'local' in event:
local = event['local'] == 'True'
if local:
dynamodb = boto3.resource('dynamodb', endpoint_url="http://localhost:8000")
else:
dynamodb = boto3.resource('dynamodb')
days = 30
if 'days' in event:
days = int(event['days'])
print(f"Scanning Clients with {days} or less to secret expiration")
table = dynamodb.Table('****')
scan_kwargs = {
'ProjectionExpression': 'client_id, sort, developer_id, secrets, approved'
}
test = False
if 'test' in event:
test = event['test'] == 'True'
done = False
start_key = None
developers = {}
clients_to_be_notified = []
if test:
developers['idm-portal1'] = '***'
clients_to_be_notified = [{'client_id': 'idm-portal1', 'expiration_date': '04/17/2021'}]
while not done:
if start_key:
scan_kwargs['ExclusiveStartKey'] = start_key
response = table.scan(**scan_kwargs)
process_clients(response.get('Items', []), developers, clients_to_be_notified, days)
start_key = response.get('LastEvaluatedKey', None)
done = start_key is None
print("total developers ", len(developers), " total clients_to_be_notified ", len(clients_to_be_notified))
return notify_clients(clients_to_be_notified, developers)
if __name__ == '__main__':
scan_clients(event={'days': 30, 'local': False, 'test': True}, context=None)
Response
{
"errorMessage": "Unable to import module 'test_client_visitor': No module named 'test_client_visitor'",
"errorType": "Runtime.ImportModuleError",
"stackTrace": []
}
Your file must be named test_client_visitor.py. Lambda runs your code by importing the main module and calling the handler function; for example, a handler setting of test_client_visitor.handler requires a file named test_client_visitor.py at the root of the deployment package that defines handler. See the AWS docs on setting up a handler for Python.
The reason you didn't run into this issue locally is that I assume you are calling Python directly on the command line: python test_client_visitor. When you import a module in Python, the file has to end in the .py extension.
I was able to fix this issue by packaging the contents of the zip correctly, avoiding the creation of an extra top-level folder, with the command below.
Command:
cd folder; zip -r ../filename.zip *
Thank you everyone for your inputs.
I want to make a Python client for the Last.fm API, a kind of library.
I managed to get and set a session by getting a session key. Afterwards, I try to call a POST method that requires an API key, an api_signature, and a session key. So I use the API key I have, the same api_signature I used to get the session key, and the session key itself.
But I get an "invalid method signature" error, even though I use the same api_signature for the POST calls.
import json
import webbrowser
from hashlib import md5
import urllib3
class PyLast():
def __init__(self, API_KEY, SECRET, SESSION_KEY=None):
self.__API_KEY__ = API_KEY
self.__SECRET__ = SECRET
self.__SESSION_KEY__ = SESSION_KEY
self.__api_signature__ = None
if SESSION_KEY is None:
self.__is_authorized__ = False
else:
self.__is_authorized__ = True
self.__http__ = urllib3.PoolManager()
def request_token(self):
print("Getting the token...")
url = 'http://ws.audioscrobbler.com/2.0/?method=auth.gettoken&api_key={}&format=json'.format(self.__API_KEY__)
req_response = self.__http__.request('GET', url, headers={'User-Agent' : 'Mozilla/5.0'})
if req_response.status == 200:
json_data = json.loads(req_response.data.decode('utf-8'))
TOKEN = json_data['token']
self.__TOKEN__ = TOKEN
return TOKEN
else:
print("Error with code " + req_response.status)
def authorize(self):
if not self.__is_authorized__:
url = 'http://www.last.fm/api/auth/?api_key={}&token={}'.format(self.__API_KEY__, self.__TOKEN__)
# open browser to authorize app
webbrowser.open(url, new=0, autoraise=True)
# Make sure authorized
self.__is_authorized__ = True
def start_session(self):
if self.__is_authorized__:
data = "api_key{}methodauth.getSessiontoken{}{}" \
.format(self.__API_KEY__, self.__TOKEN__, self.__SECRET__).encode(
encoding='utf-8')
self.__api_signature__ = md5(data).hexdigest()
url = 'http://ws.audioscrobbler.com/2.0/?method=auth.getSession&api_key={}&token={}&api_sig={}&format=json'.format(
self.__API_KEY__, self.__TOKEN__, self.__api_signature__)
req_response = self.__http__.request('GET', url)
if req_response.status == 200:
json_data = json.loads(req_response.data.decode('utf-8'))
session_key = json_data['session']['key']
self.__SESSION_KEY__ = session_key
url = 'http://ws.audioscrobbler.com/2.0/?method=track.love&api_key={}&api_sig={}&sk={}&artist=cher&track=believe&format=json'.format(
self.__API_KEY__, self.__api_signature__, self.__SESSION_KEY__)
req_response = self.__http__.request('POST', url)
return self.__SESSION_KEY__
else:
print("Error with code " + str(req_response.status))
else:
print("Not authorized!")
I found a solution. The problem was that I was using the same parameters used to generate the session key to make a POST call. The right way to sign a method for the Last.fm API is to build the api_sig from the parameters of the POST method we want to call. For example, to generate the api_sig for track.love we use these parameters:
data = {"api_key": API_KEY,
"method": "track.love",
"track" : "yellow",
"artist" :"coldplay",
"sk" : SESSION_KEY
}
keys = sorted(data.keys())
param = [k+data[k] for k in keys]
param = "".join(param) + SECRET
api_sig = md5(param.encode()).hexdigest() # this api_sig used to sign track.love call.
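With that signature in hand, the signed call itself can be sent as form fields. A minimal sketch (assuming http is a urllib3.PoolManager() like the one in the class above; note that format is added after signing, since it is not part of the signature):
fields = dict(data)
fields["api_sig"] = api_sig
fields["format"] = "json"
req_response = http.request('POST', 'http://ws.audioscrobbler.com/2.0/', fields=fields)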
I have some trouble with this application. What I need is that if I detect a change in the database (Firebase), particularly in the 'sala' and 'ventilacion' nodes, the function does what it has to do. If there isn't any change in the database, it should do nothing. I am using Python and the pyrebase library. Here is the code. Thank you very much for your help.
import pyrebase
import serial
import time
config = {
#firebase configurations
}
firebase = pyrebase.initialize_app(config)
db = firebase.database()
def ReconfiguracionFabrica():
ser.write('AT')
time.sleep(0.2)
ser.write('AT+RENEW')
time.sleep(0.3)
def ConfiguracionMaster():
time.sleep(0.5)
ser.write('AT+IMME1')
time.sleep(0.350)
ser.write('AT+ROLE1')
time.sleep(0.2)
ser = serial.Serial(port="/dev/ttyAMA0", baudrate=9600, timeout=1)
ReconfiguracionFabrica()
time.sleep(0.1)
ConfiguracionMaster()
time.sleep(0.1)
print "********** INICIO *************"
ser.flushInput()
contador = 0
prender = ''
ventilacion1 = ''
checkeo = ''
while True:
#if db.child("sala").: # It is the line where would be the conditional that allows me to detect any change only in the sala's node.
salidaLed1 = db.child("sala").get()
ser.write('AT')
time.sleep(0.1)
ser.write('AT+CON508CB16A7014')
time.sleep(0.1)
if salidaLed1.val() == True:
prender = ";"
if salidaLed1.val() == False:
prender = ","
ser.write('luz: %s \n' %(prender))
print ('luz: %s \n' %(prender))
time.sleep(1)
checkeo = ser.read(1)  # read one byte back (the original ser.read(checkeo) never stored the result)
if checkeo == 'j':
ReconfiguracionFabrica()
time.sleep(0.1)
ConfiguracionMaster()
Question: How to detect changes in a Firebase child
Note: all examples use public access.
Set up example data and verify it's readable.
This has to be done once!
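Assumed setup for the examples below (a sketch; fill in your Firebase config as in the question):
import time
import pyrebase

config = {
    # your firebase configuration
}
firebase = pyrebase.initialize_app(config)
db = firebase.database()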
temperature_c = 30
data = {'date':time.strftime('%Y-%m-%d'),
'time':time.strftime('%H:%M:%S'),
'temperature':temperature_c}
db.child('public').child('Device_1').set(data)
response = db.child('public').child('Device_1').get()
print(response.val())
Create the first script, which performs updates:
for t in [25, 26, 27, 28, 29, 30, 31, 32, 33, 35]:
temperature_c = t
data = {'date':time.strftime('%Y-%m-%d'),
'time':time.strftime('%H:%M:%S'),
'temperature':temperature_c}
db.child('public').child('Device_1').update(data)
time.sleep(60)
Create the second script, with a stream handler:
def stream_handler(message):
print('event={m[event]}; path={m[path]}; data={m[data]}'
.format(m=message))
my_stream = db.child('public').child('Device_1').stream(stream_handler)
# Run Stream Handler forever
while True:
data = input("[{}] Type exit to disconnect: ".format('?'))
if data.strip().lower() == 'exit':
print('Stop Stream Handler')
if my_stream: my_stream.close()
break
Run Stream Handler Script:
Response Output from def stream_handler after startup (Initial Data):
event="put"; path=/; data={'Device_1': {'temperature': 30, 'time': '13:34:24', 'date': '2017-07-20'}}
Run Updater Script:
Watch Output from Stream Handler Script
Response Output from def stream_handler after First Update Data:
event=patch; path=/Device_1; data={'temperature': 25, 'time': '13:49:12'}
Tested with Python 3.4.2.
From the Pyrebase documentation on streaming:
You can listen to live changes to your data with the stream() method.
def stream_handler(message):
print(message["event"]) # put
print(message["path"]) # /-K7yGTTEp7O549EzTYtI
print(message["data"]) # {'title': 'Pyrebase', "body": "etc..."}
my_stream = db.child("posts").stream(stream_handler)
You should at least handle put and patch events. Refer to "Streaming from the REST API" for details.
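For the question's nodes, a minimal sketch (the node names come from the question; the handler bodies are assumptions) attaches one stream per node:
def sala_handler(message):
    # 'put' delivers the initial snapshot and full overwrites, 'patch' delivers partial updates
    if message["event"] in ("put", "patch"):
        pass  # message["data"] holds the new value under 'sala'

def ventilacion_handler(message):
    if message["event"] in ("put", "patch"):
        pass  # message["data"] holds the new value under 'ventilacion'

sala_stream = db.child("sala").stream(sala_handler)
ventilacion_stream = db.child("ventilacion").stream(ventilacion_handler)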
I know this post is 2 years old, but hope this helps. Try using the firebase_admin module.
Use this command: pip install firebase-admin
I too had a requirement where I needed to check for changes made to the Firebase database. I referred here.
The following is sample code based on your question which you can refer to and try out.
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
cred = credentials.Certificate("path/to/serviceAccountKey.json")
firebase_admin.initialize_app(cred, {
'databaseURL': 'https://example.firebaseio.com',
'databaseAuthVariableOverride': None
})
def ignore_first_call(fn):
called = False
def wrapper(*args, **kwargs):
nonlocal called
if called:
return fn(*args, **kwargs)
else:
called = True
return None
return wrapper
@ignore_first_call
def listener(event):
print(event.event_type) # can be 'put' or 'patch'
print(event.path) # relative to the reference, it seems
print(event.data) # new data at /reference/event.path. None if deleted
node = str(event.path).split('/')[-2] #you can slice the path according to your requirement
property = str(event.path).split('/')[-1]
value = event.data
if (node=='sala'):
    pass  # do something
elif (node=='ventilacion'):
    pass  # do something
else:
    pass  # do something else
db.reference('/').listen(listener)
I was working on the same thing, so based on current updates to pyrebase and learning from the answers posted above, I got this running perfectly. (Please make sure your Python is upgraded from Python 2 to Python 3 for running pyrebase and firebase-admin.)
import firebase_admin
import pyrebase
from firebase_admin import credentials
config = {
"apiKey": "",
"authDomain": "",
"databaseURL": "",
"projectId": "",
"storageBucket": "",
"serviceAccount": "path to the service account json file you downloaded",
"messagingSenderId": "",
"appId": "",
"measurementId": ""
}
firebase = pyrebase.initialize_app(config)
storage = firebase.storage()
cred = credentials.Certificate("path to downloaded json file")
firebase_admin.initialize_app(cred, {
"databaseURL": "same as config",
"databaseAuthVariableOverride": None
})
db = firebase.database()
def ignore_first_call(fn):
called = False
def wrapper(*args, **kwargs):
nonlocal called
if called:
return fn(*args, **kwargs)
else:
called = True
return None
return wrapper
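# Note: ignore_first_call is defined above but never applied in this snippet; decorating
# stream_handler below with @ignore_first_call would skip the initial snapshot event that
# fires when the stream first connects.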
def stream_handler(message):
ab = str(1)
all_videos = storage.child("videos/").list_files() #node where files are
path_on_local = "local path to save the downloads"
print(message["event"]) # put
print(message["path"]) # /-K7yGTTEp7O549EzTYtI
print(message["data"]) # {'title': 'Pyrebase', "body": "etc..."}
node = str(message["path"]).split('/')[-2]
property = str(message["path"]).split('/')[-1]
value = message["data"]
if (message["event"] == "put"):
for videos in all_videos:
try:
print(videos.name)
z = storage.child(videos.name).get_url(None)
storage.child(videos.name).download(path_on_local + "/" + ab + ".mp4")
x = int(ab)
ab = str(x + 1)
except:
print('Download Failed')
else:
print("error")
my_stream = db.child("videos").stream(stream_handler)
I need some help implementing a python app that accesses the Quickbooks API. I have successfully written several apps that use APIs, but once we get into the OAuth world, I get a bit lost.
At any rate, I found the quickbooks-python wrapper here:
https://github.com/troolee/quickbooks-python
but there are zero examples of working code showing how to implement it properly. I imagine that a more experienced Python programmer could figure out how to make this work without any instructions, but it seems like I'm missing the basics.
If I could get it connected, I could probably get it to work from there...
It seems like the documentation on GitHub jumps around and, for a more experienced programmer, would probably make perfect sense. But I'm just not following...
from quickbooks import *
consumerKey = "fromApiConsole"
consumerSecret = "fromApiConsole"
callbackUrl = "https://quickbooks.api.intuit.com/v3"
qbObject = QuickBooks(
consumer_key = consumerKey,
consumer_secret = consumerSecret,
callback_url = callbackUrl
)
authorize_url = qbObject.get_authorize_url() # will create a service, and further set up the qbObject.
oauth_token = request.GET['oauth_token']
oauth_verifier = request.GET['oauth_verifier']
realm_id = request.GET['realmId']
session = qbObject.get_access_tokens(oauth_verifier)
# say you want access to the reports
reportType = "ProfitAndLoss"
url = "https://quickbooks.api.intuit.com/v3/company/asdfasdfas/"
url += "reports/%s" % reportType
r = session.request( #This is just a Rauth request
"POST",
url,
header_auth = True,
realm = realm_id,
params={"format":"json"}
)
qb = QuickBooks(
consumer_key = consumerKey,
consumer_secret = consumerSecret,
access_token = qbtoken.access_token, # the stored token
access_token_secret = qbtoken.access_token_secret, # the stored secret
company_id = qbtoken.realm_id #the stored realm_id
)
qbText = str(qb.query_objects(business_object, params, query_tail))
print qbText
I am pretty sure that I am:
importing the wrong modules/classes
missing huge pieces of code to "glue together" the samples found on github
not using Django here; I know the request object above comes from Django, but I'd really like to make this work as a plain Python script without Django
not getting the token/identifier/realmId from the initial authorize_url function. It prints on the screen, but I'm not sure how to grab it...
The end goal here is really just to connect and get a P&L statement from Quickbooks Online. If I can get that far, I am sure I can get the rest of what I need out of the API. I don't really need to CHANGE anything, I'm just looking to include data from the reports into some dashboards.
* UPDATE *
Okay, I figured out how to get it to connect, but I'm not sure how to get to the reports.
The answer was this, which was on the prior API page:
Accessing the API
Once you've gotten a hold of your QuickBooks access tokens, you can create a QB object:
qb = QuickBooks(consumer_key = QB_OAUTH_CONSUMER_KEY,
consumer_secret = QB_OAUTH_CONSUMER_SECRET,
access_token = QB_ACCESS_TOKEN,
access_token_secret = QB_ACCESS_TOKEN_SECRET,
company_id = QB_REALM_ID
)
Still trying to get the basic reports...
Okay, so here's how to make this work. I'm focused on the reports, so here's how you can get reports from Quickbooks Online API using Python:
1) Go to https://github.com/finoptimal-dev/quickbooks-python and download the code
2) Make sure you have rauth installed (it is a PyPI package):
pip install rauth
3) Edit the quickbooks2.py file and add the following to the END:
qb = QuickBooks(consumer_key = QB_OAUTH_CONSUMER_KEY,
consumer_secret = QB_OAUTH_CONSUMER_SECRET,
access_token = QB_ACCESS_TOKEN,
access_token_secret = QB_ACCESS_TOKEN_SECRET,
company_id = QB_REALM_ID
)
4) Setup a sandbox application on the Quickbooks site here: https://developer.intuit.com/v2/ui#/app/startcreate (you will have to create a developer account if you don't already have one)
5) Once setup, you can go to the "Keys" tab of the App and grab the App Token, OAuth Consumer Key and OAuth Consumer Secret.
6) Go to the Intuit Developer Playground at https://appcenter.intuit.com/Playground/OAuth/IA and use the info from step #5 to obtain the Access Token and Access Token Secret.
7) Change the variables in Step #3 to the correct values. For QB_REALM_ID, this is the Company ID. You can get this in the sandbox by logging into https://developer.intuit.com/v2/ui#/sandbox and looking for Company ID.
8) Add the following code below the code from step #3 above
print qb.get_report('ProfitAndLoss','summarize_column_by=Month&start_date=2014-01-01&end_date=2014-12-31')
I use the above dates because the Quickbooks Sandbox company has no Income/Expense data in 2015, so you have to pick dates in 2014.
9) IMPORTANT: To use with the Quickbooks Sandbox for reporting purposes, you need to change the get_report() function to use the base_url_v3 instead of being hard-coded to the production URL.
Look for a row in the get_report() function that looks like this:
url = "https://quickbooks.api.intuit.com/v3/company/%s/" % \
and change it to this:
url = self.base_url_v3 + "/company/%s/" % \
10) Now you can change base_url_v3 all the way at the top to this:
base_url_v3 = "https://sandbox-quickbooks.api.intuit.com/v3"
11) Now you should be able to run:
python quickbooks2.py
You should see a bunch of JSON data from the Quickbooks Sandbox company.
12) You can explore a bit to test out the appropriate URLs here: https://developer.intuit.com/apiexplorer?apiname=V3QBO#Reports
13) The report reference is here: https://developer.intuit.com/docs/0100_accounting/0400_references/reports and this shows you which parameters you can use. To test parameters in the Explorer, you enter them in the "Request Body" section.
I struggled with this for a while and finally figured it out. Hope this helps someone else.
I do not have much experience with Python, but someone shared this code with me for OAuth earlier. If you have additional questions on the code, I will not be able to answer them.
NOTE: The code below also makes calls to the v2 QBO APIs. Please do not use that part, as it is deprecated.
See if it helps:
from rauth import OAuth1Session, OAuth1Service
import xml.etree.ElementTree as ET
import xmltodict
class QuickBooks():
"""A wrapper class around Python's Rauth module for Quickbooks the API"""
access_token = ''
access_token_secret = ''
consumer_key = ''
consumer_secret = ''
company_id = 0
callback_url = ''
session = None
base_url_v3 = "https://quickbooks.api.intuit.com/v3"
base_url_v2 = "https://qbo.intuit.com/qbo1"
request_token_url = "https://oauth.intuit.com/oauth/v1/get_request_token"
access_token_url = "https://oauth.intuit.com/oauth/v1/get_access_token"
authorize_url = "https://appcenter.intuit.com/Connect/Begin"
# Things needed for authentication
qbService = None
request_token = ''
request_token_secret = ''
def __init__(self, **args):
if 'consumer_key' in args:
self.consumer_key = args['consumer_key']
if 'consumer_secret' in args:
self.consumer_secret = args['consumer_secret']
if 'access_token' in args:
self.access_token = args['access_token']
if 'access_token_secret' in args:
self.access_token_secret = args['access_token_secret']
if 'company_id' in args:
self.company_id = args['company_id']
if 'callback_url' in args:
self.callback_url = args['callback_url']
def get_authorize_url(self):
"""Returns the Authorize URL as returned by QB,
and specified by OAuth 1.0a.
:return URI:
"""
self.qbService = OAuth1Service(
name = None,
consumer_key = self.consumer_key,
consumer_secret = self.consumer_secret,
request_token_url = self.request_token_url,
access_token_url = self.access_token_url,
authorize_url = self.authorize_url,
base_url = None
)
self.request_token, self.request_token_secret = self.qbService.get_request_token(
params={'oauth_callback':self.callback_url}
)
return self.qbService.get_authorize_url(self.request_token)
def get_access_tokens(self, oauth_verifier):
"""Wrapper around get_auth_session, returns session, and sets
access_token and access_token_secret on the QB Object.
:param oauth_verifier: the oauth_verifier as specified by OAuth 1.0a
"""
session = self.qbService.get_auth_session(
self.request_token,
self.request_token_secret,
data={'oauth_verifier': oauth_verifier})
self.access_token = session.access_token
self.access_token_secret = session.access_token_secret
return session
def create_session(self):
if self.consumer_secret and self.consumer_key and self.access_token_secret and self.access_token:
# print "hi"
session = OAuth1Session(self.consumer_key,
self.consumer_secret,
self.access_token,
self.access_token_secret,
)
# print session
self.session = session
else:
pass
#TODO: raise an error
return self.session
def keep_trying(self, r_type, url, header_auth, realm, payload=''):
if self.session != None:
session = self.session
else:
session = self.create_session()
self.session = session
trying = True
tries = 0
while trying:
print url
tries += 1
if "v2" in url:
r = session.request(r_type, url, header_auth, realm, data=payload)
r_dict = xmltodict.parse(r.text)
# print "DICT", r_dict
if "FaultInfo" not in r_dict or tries > 4:
trying = False
else:
# url = "https://qb.sbfinance.intuit.com/v3/company/184010684/query?query=SELECT * FROM JournalEntry"
# url = "https://quickbooks.api.intuit.com/v3/company/184010684/journalentry/24772"
# url = "https://quickbooks.api.intuit.com/v3/company/184010684/query?query='SELECT+*+FROM+JournalEntry'"
# url = "https://qb.sbfinance.intuit.com/v3/company/184010684/query?query=SELECT%20%2A%20FROM%20JournalEntry&"
print url, r_type
headers = {'Accept': 'application/json'}
r = session.request(r_type, url, header_auth, realm, headers = headers)
# r.headers
print "\n\n INITIAL TEXT \n\n", r.text
print "request headers:", r.request.headers
print "request URL:", r.request.url
print "response headers:", r.headers
r_dict = r.text
if "Fault" not in r_dict or tries > 4:
trying = False
r_dict = []
return r_dict
def fetch_customer(self, pk):
if pk:
url = self.base_url_v2 + "/resource/customer/v2/%s/%s" % ( self.company_id, pk)
r_dict = self.keep_trying("GET", url, True, self.company_id)
return r_dict['Customer']
def fetch_customers(self, all=False, page_num=0, limit=10):
if self.session != None:
session = self.session
else:
session = self.create_session()
self.session = session
# We use v2 of the API, because what the fuck, v3.
url = self.base_url_v2
url += "/resource/customers/v2/%s" % (self.company_id)
customers = []
if all:
counter = 1
more = True
while more:
payload = {
"ResultsPerPage":30,
"PageNum":counter,
}
trying = True
# Because the QB API is so iffy, let's try until we get an non-error
# Rewrite this to use same code as above.
while trying:
r = session.request("POST", url, header_auth = True, data = payload, realm = self.company_id)
root = ET.fromstring(r.text)
if root[1].tag != "{http://www.intuit.com/sb/cdm/baseexceptionmodel/xsd}ErrorCode":
trying = False
else:
print "Failed"
session.close()
qb_name = "{http://www.intuit.com/sb/cdm/v2}"
for child in root:
# print child.tag, child.text
if child.tag == "{http://www.intuit.com/sb/cdm/qbo}Count":
if int(child.text) < 30:
more = False
print "Found all customers"
if child.tag == "{http://www.intuit.com/sb/cdm/qbo}CdmCollections":
for customer in child:
customers += [xmltodict.parse(ET.tostring(customer))]
counter += 1
# more = False
# print more
else:
payload = {
"ResultsPerPage":str(limit),
"PageNum":str(page_num),
}
r = session.request("POST", url, header_auth = True, data = payload, realm = self.company_id)
root = ET.fromstring(r.text)
#TODO: parse for all customers
return customers
def fetch_sales_term(self, pk):
if pk:
url = self.base_url_v2 + "/resource/sales-term/v2/%s/%s" % ( self.company_id, pk)
r_dict = self.keep_trying("GET", url, True, self.company_id)
return r_dict
def fetch_invoices(self, **args):
if "query" in args:
payload = ""
if "customer" in args['query']:
payload = {
"Filter":"CustomerId :Equals: %s" % (args['query']['customer'])
}
# while more:
url = self.base_url_v2 + "/resource/invoices/v2/%s/" % (self.company_id)
r_dict = self.keep_trying("POST", url, True, self.company_id, payload)
invoices = r_dict['qbo:SearchResults']['qbo:CdmCollections']['Invoice']
return invoices
elif "pk" in args:
# TODO: Not tested
url = self.base_url_v2 + "/resource/invoice/v2/%s/%s" % ( self.company_id, args['pk'])
r_dict = self.keep_trying("GET", url, True, self.company_id)
return r_dict
else:
url = self.base_url_v2 + "/resource/invoices/v2/%s/" % (self.company_id)
r_dict = self.keep_trying("POST", url, True, self.company_id, payload)
return "BLAH"
def fetch_journal_entries(self, **args):
""" Because of the beautiful way that journal entries are organized
with QB, you're still going to have to filter these results for the
actual entity you're interested in. Luckily it only returns the entries
that are relevant to your search
:param query: a dictionary that includes 'customer', and the QB id of the
customer
"""
if "query" in args:
payload = {}
more = True
counter = 1
journal_entries = []
if "customer" in args['query']:
payload = {
"Filter":"CustomerId :Equals: %s" % (args['query']['customer'])
}
# payload = {
# "query":"SELECT * FROM JournalEntry",
# }
while more:
payload["ResultsPerPage"] = 30
payload["PageNum"] = counter
# url = self.base_url_v2 + "/resource/journal-entries/v2/%s/" % (self.company_id)
# url = self.base_url_v3 + "/company/%s/query" % (self.company_id)
url = "https://qb.sbfinance.intuit.com/v3/company/184010684/query?query=SELECT%20%2A%20FROM%20JournalEntry&"
r_dict = self.keep_trying("GET", url, True, self.company_id, payload)
more = False
# print r_dict['qbo:SearchResults']['qbo:Count']
counter = counter + 1
# if int(r_dict['qbo:SearchResults']['qbo:Count']) < 30:
# more = False
# journal_entry_set = r_dict['qbo:SearchResults']['qbo:CdmCollections']['JournalEntry']
# journal_entries += [journal_entry_set]
return []
# return r_dict['qbo:SearchResults']['qbo:CdmCollections']['JournalEntry']
elif "pk" in args:
# TODO: Not Tested
url = self.base_url_v2 + "/resource/journal-entry/v2/%s/%s" % ( self.company_id, args['pk'])
r_dict = self.keep_trying("GET", url, True, self.company_id)
return r_dict
else:
url = self.base_url_v2 + "/resource/journal-entries/v2/%s/" % (self.company_id)
r_dict = self.keep_trying("POST", url, True, self.company_id)
print r_dict
return "BLAH"
Related
from rauth.service import OAuth1Service
class TwitterClient:
KNOWN_USERS = ['177512438-pJTdMgOPLJ8hCQFfDbPmkU3LRcCSHZd5VqoM3WaY', '7Z50XjV1CqDF1753Rzd4MkzSYN1oCY2FaaFtAZU']
def __init__(self):
# Get a real consumer key & secret from https://dev.twitter.com/apps/new
self.twitter = OAuth1Service(
name='twitter',
consumer_key='ZjXNoqpLfhQvYBgkjrrvxQ',
consumer_secret='8O4NjcsNed8uGICjDLNTjPiNwqjPezovWZIVru1c',
request_token_url='https://api.twitter.com/oauth/request_token',
access_token_url='https://api.twitter.com/oauth/access_token',
authorize_url='https://api.twitter.com/oauth/authorize',
base_url='https://api.twitter.com/1/')
#self.KNOWN_USERS = ['177512438-pJTdMgOPLJ8hCQFfDbPmkU3LRcCSHZd5VqoM3WaY', '7Z50XjV1CqDF1753Rzd4MkzSYN1oCY2FaaFtAZU']
def new_session(self):
request_token, request_token_secret = self.twitter.get_request_token()
authorize_url = self.twitter.get_authorize_url(request_token)
print 'Visit this URL in your browser: ' + authorize_url
pin = raw_input('Enter PIN from browser: ')
session = self.twitter.get_auth_session(request_token,
request_token_secret,
method='POST',
data={'oauth_verifier': pin})
print session.access_token, session.access_token_secret # Save this to database
return session
def reuse_session(self, user2):
access_token = user2[0]
access_token_secret = user2[1]
session = self.twitter.get_session((access_token, access_token_secret))
return session
def init_session(self, user2):
if user2[0] == self.KNOWN_USERS[0] and user2[1] == self.KNOWN_USERS[1] : session = self.reuse_session(user2)
else : session = self.new_session()
return session
def list_tweets(self, user2):
session = self.init_session(user2)
params = {'include_rts': 1, # Include retweets
'count': 10} # 10 tweets
r = session.get('statuses/home_timeline.json', params=params)
for i, tweet in enumerate(r.json(), 1):
handle = tweet['user']['screen_name'].encode('utf-8')
text = tweet['text'].encode('utf-8')
print '{0}. @{1} - {2}'.format(i, handle, text)
tc = TwitterClient()
user1 = ['177512438-pJTdMgOPLJ8hCQFfDbPmkU3LRcCSHZd5VqoM3WaY', '7Z50XjV1CqDF1753Rzd4MkzSYN1oCY2FaaFtAZU']
tc.list_tweets( user1 )
But this gives the following error at line 50:
string indices must be integers
So instead I write this:
from rauth.service import OAuth1Service
class TwitterClient:
KNOWN_USERS = { # (access_token, access_token_secret)
'user1' : ('177512438-pJTdMgOPLJ8hCQFfDbPmkU3LRcCSHZd5VqoM3WaY', '7Z50XjV1CqDF1753Rzd4MkzSYN1oCY2FaaFtAZU')
}
def __init__(self):
# Get a real consumer key & secret from https://dev.twitter.com/apps/new
self.twitter = OAuth1Service(
name='twitter',
consumer_key='ZjXNoqpLfhQvYBgkjrrvxQ',
consumer_secret='8O4NjcsNed8uGICjDLNTjPiNwqjPezovWZIVru1c',
request_token_url='https://api.twitter.com/oauth/request_token',
access_token_url='https://api.twitter.com/oauth/access_token',
authorize_url='https://api.twitter.com/oauth/authorize',
base_url='https://api.twitter.com/1.1/')
def new_session(self):
request_token, request_token_secret = self.twitter.get_request_token()
authorize_url = self.twitter.get_authorize_url(request_token)
print 'Visit this URL in your browser: ' + authorize_url
pin = raw_input('Enter PIN from browser: ')
session = self.twitter.get_auth_session(request_token,
request_token_secret,
method='POST',
data={'oauth_verifier': pin})
print session.access_token, session.access_token_secret # Save this to database
return session
def reuse_session(self, user1):
access_token, access_token_secret = self.KNOWN_USERS[user1]
session = self.twitter.get_session((access_token, access_token_secret))
return session
def init_session(self, user1):
if user1 in self.KNOWN_USERS : session = self.reuse_session(user1)
else : session = self.new_session()
return session
def list_tweets(self, user1):
session = self.reuse_session(user1)
params = {'include_rts': 1, # Include retweets
'count': 10} # 10 tweets
r = session.get('statuses/home_timeline.json', params=params)
print r.json
#for i, tweet in enumerate(r.json(), 1):
#handle = tweet['user']['screen_name'].encode('utf-8')
#text = tweet['text'].encode('utf-8')
#print '{0}. - {2}'.format(i, text)
tc = TwitterClient()
tc.list_tweets('user1')
But I get the following output:
<bound method Response.json of <Response [401]>>
With the minimal information you've given us, I'm guessing your GET request is failing, causing you to receive a JSON object that looks like this:
{"errors":[{"message":"Bad Authentication data","code":215}]}
This will decode as a dictionary, and when you iterate over it you'll get the keys in the dictionary, which are strings. When you attempt to index into those strings using dictionary keys, you'll get the error that you can only index into strings (or any sequences) using integers.
Verify this by printing r.json() before iterating over it. Fix it by reading the error message you're getting back and remedying the problem it's reporting to you.
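A sketch of that check in the style of the code above (the error-shape test is an assumption based on the sample error):
r = session.get('statuses/home_timeline.json', params=params)
body = r.json()  # note the parentheses; printing r.json without them prints the bound method
if r.status_code != 200 or (isinstance(body, dict) and 'errors' in body):
    print body  # e.g. {"errors":[{"message":"Bad Authentication data","code":215}]}
else:
    for i, tweet in enumerate(body, 1):
        print '{0}. @{1} - {2}'.format(i, tweet['user']['screen_name'].encode('utf-8'), tweet['text'].encode('utf-8'))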