I'm trying to insert Telegram quick replies into a Dialogflow response using a fulfillment back end written in Python.
from flask import request, jsonify, make_response  # assuming a Flask webhook


def results():
    quick_replies_list = ['book appointment', 'list of doctors', 'location']
    title = "Hello and welcome."
    platform = "TELEGRAM"
    req = request.get_json()
    # req = request.get_json(silent=True, force=True)
    print(req)
    action = req.get('queryResult').get('action')
    if action == "input.welcome":
        result = req.get('queryResult')  # v2 webhook requests use 'queryResult'; 'result' is the v1 field
        result['quickReplies'] = quick_replies(platform, title, quick_replies_list)
        res = jsonify(result)  # jsonify, not jsonfy
        r = make_response(res)
        return r


def quick_replies(platform, title, quick_replies_list):
    # build the list of quick-reply labels
    replies = [str(quick_reply) for quick_reply in quick_replies_list]
    return {
        "quickReplies": {
            "title": str(title),
            "quickReplies": replies
        },
        "platform": platform
    }
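For reference, in a Dialogflow v2 webhook response, platform-specific messages such as Telegram quick replies are normally returned inside fulfillmentMessages; below is a minimal sketch of that wrapper, reusing the quick_replies helper above (the v2 WebhookResponse field names are the assumption here, not something stated in the question):

# Minimal sketch: wrap the quick-reply payload in a Dialogflow v2 webhook
# response. Assumes the quick_replies() helper defined above and the v2
# fulfillmentMessages response format.
def build_webhook_response(title, quick_replies_list):
    return {
        "fulfillmentText": title,
        "fulfillmentMessages": [
            quick_replies("TELEGRAM", title, quick_replies_list)
        ]
    }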
I have looked through the FTX API documentation found here: https://docs.ftx.us/#overview
And I've looked at the example code found in this repo: https://github.com/ftexchange/ftx/tree/master/rest
I am hitting the POST endpoint /wallet/withdrawals, but I am getting:
{"success":false,"error":"Not logged in: Invalid signature","errorCode":"not_logged_in"}
Here is my Code:
import datetime
import hmac
import json

import requests
from requests import Request

# Get the server time from FTX and convert it to a millisecond timestamp
resp = requests.get('https://otc.ftx.com/api/time')
ftx_t = resp.json()
time = ftx_t['result']
date_format = datetime.datetime.strptime(time, "%Y-%m-%dT%H:%M:%S.%f%z")
unix_time = datetime.datetime.timestamp(date_format)
ts = unix_time * 1000
# ts = int(time.time() * 1000)

request = Request('POST', 'https://ftx.us/api/wallet/withdrawals')  # to make withdrawals
prepared1 = request.prepare()
body = {
    "coin": "USDT",               # coin to withdraw
    "size": 0,                    # amount to withdraw
    "address": "***************"  # address to send to
}
signature_payload1 = f'{ts}{prepared1.method}{prepared1.path_url}'.encode()
signature1 = hmac.new('SECRET KEY'.encode(), signature_payload1, 'sha256').hexdigest()
prepared1.headers['FTXUS-KEY'] = 'API KEY'
prepared1.headers['FTXUS-SIGN'] = signature1
prepared1.headers['FTXUS-TS'] = str(ts)
prepared1.headers['Content-Type'] = 'application/json'
prepared1.headers['Accept'] = 'application/json'
data1 = json.dumps(body).encode()
res10 = requests.post('https://ftx.us/api/wallet/withdrawals', data=data1, headers=prepared1.headers)
You need to encode the request body, include that encoded body in the signature payload, and also send the same body with the POST request itself.
This might help:
Before computing your signature1 variable, add the following:
prepared1.body = json.dumps(body).encode()
if prepared1.body:
    signature_payload1 += prepared1.body
And in your POST request, pass the body as the data argument:
res10 = requests.post('https://ftx.us/api/wallet/withdrawals', data=prepared1.body, headers=prepared1.headers)
Here I have edited the code for you:
resp = requests.get('https://otc.ftx.com/api/time')
ftx_t = resp.json()
time = ftx_t['result']
date_format = datetime.datetime.strptime(time, "%Y-%m-%dT%H:%M:%S.%f%z")
unix_time = datetime.datetime.timestamp(date_format)
ts = unix_time * 1000

request = Request('POST', 'https://ftx.us/api/wallet/withdrawals')  # to make withdrawals
prepared1 = request.prepare()
signature_payload1 = f'{ts}{prepared1.method}{prepared1.path_url}'.encode()
body = {
    "coin": "USDT",               # coin to withdraw
    "size": 0,                    # amount to withdraw
    "address": "***************"  # address to send to
}
prepared1.body = json.dumps(body).encode()
if prepared1.body:
    signature_payload1 += prepared1.body  # the body must be part of the signed payload
signature1 = hmac.new('SECRET KEY'.encode(), signature_payload1, 'sha256').hexdigest()
prepared1.headers['FTXUS-KEY'] = 'API KEY'
prepared1.headers['FTXUS-SIGN'] = signature1
prepared1.headers['FTXUS-TS'] = str(ts)
prepared1.headers['Content-Type'] = 'application/json'
prepared1.headers['Accept'] = 'application/json'
res10 = requests.post('https://ftx.us/api/wallet/withdrawals', data=prepared1.body, headers=prepared1.headers)
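For convenience, the same flow (timestamp, signature over method + path + body, headers, POST) could be folded into a small helper. This is just a sketch based on the code above; the function name and parameters are my own, not from any FTX library:

import hmac
import json
import time

import requests


# Hypothetical convenience wrapper around the signing flow shown above; the
# name ftx_signed_post and its parameters are mine, not part of an FTX client.
def ftx_signed_post(path, body, api_key, api_secret):
    ts = int(time.time() * 1000)
    payload = json.dumps(body).encode()
    request_path = '/api' + path  # the path as seen by the server, e.g. /api/wallet/withdrawals
    signature_payload = f'{ts}POST{request_path}'.encode() + payload
    signature = hmac.new(api_secret.encode(), signature_payload, 'sha256').hexdigest()
    headers = {
        'FTXUS-KEY': api_key,
        'FTXUS-SIGN': signature,
        'FTXUS-TS': str(ts),
        'Content-Type': 'application/json',
        'Accept': 'application/json',
    }
    return requests.post('https://ftx.us' + request_path, data=payload, headers=headers)

# Example call (placeholders, not real credentials):
# ftx_signed_post('/wallet/withdrawals', {"coin": "USDT", "size": 0, "address": "..."}, 'API KEY', 'SECRET KEY')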
You can check these for reference:
https://blog.ftx.com/blog/api-authentication/
https://github.com/ftexchange/ftx/issues/10
I am building a USSD application in Django with this API: https://documenter.getpostman.com/view/7705958/UyrEhaLQ#intro. I receive the request from the API and extract the data to be processed, but the menu (MSG) does not display on the user's phone; the error I get is "invalid (empty) response". This is the response to the user's request, and the content provider should reply to the request in the same format:
USERID = the ID provided by NALO to the client
MSISDN = the mobile number of the user
MSG = a mandatory parameter that holds the message to be displayed on the user's phone
MSGTYPE = indicates whether the session should continue or be terminated (true/false)
import json

import requests
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt


@csrf_exempt
def ussd(request):
    if request.method == 'GET':
        html = "<html><body>Nothing here baby!</body></html>"
        return HttpResponse(html)
    elif request.method == 'POST':
        url = "https://99c9-102-176-94-213.ngrok.io/ussd"
        response_data = json.loads(request.body)
        code_id = response_data["USERID"]
        serviceCode = response_data["MSISDN"]
        msg_type = response_data["MSGTYPE"]
        session_id = response_data["SESSIONID"]
        text = response_data["USERDATA"]
        msg = ""
        if text == "":
            msg = "Welcome To TEST Dev"
        elif text == "1":
            msg = "This is Test Two"
        payload = {
            "USERID": code_id,
            "MSISDN": serviceCode,
            "MSGTYPE": msg_type,
            "USERDATA": text,
            "SESSIONID": session_id,
            "MSG": msg,
        }
        headers = {
            'Content-Type': 'application/json'
        }
        # forward the payload to the ngrok URL and return whatever comes back
        response = requests.request("POST", url, headers=headers, data=json.dumps(payload))
        return HttpResponse(response, status=200)
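For what it's worth, since the provider expects the response parameters back in the same format, a minimal sketch of returning the payload directly as the HTTP response body instead of forwarding it to the ngrok URL (this is an assumption about the gateway's expectations, not something confirmed by the docs quoted above):

from django.http import JsonResponse

# Hypothetical sketch: return the USSD payload as the JSON body of this
# response, assuming the gateway reads MSG/MSGTYPE from the response itself
# rather than from a second POST to a callback URL.
def build_ussd_response(payload):
    return JsonResponse(payload, status=200)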
I want to know why this error occurs. I know the key is not in the dictionary, but why is it missing? Line 94 in the traceback corresponds to the return statement at the end of create_playlist in the code below.
Traceback (most recent call last):
  File "", line 154, in <module>
    cp.add_song_to_playlist()
  File "", line 127, in add_song_to_playlist
    playlist_id = self.create_playlist()
  File "", line 94, in create_playlist
    return response_json["id"]
KeyError: 'id'
import json
import os
import google_auth_oauthlib.flow
import googleapiclient.discovery
import googleapiclient.errors
import requests
import youtube_dl
from exceptions import ResponseException
from secrets import spotify_token, spotify_user_id
class CreatePlaylist:
    def __init__(self):
        self.youtube_client = self.get_youtube_client()
        self.all_song_info = {}

    def get_youtube_client(self):
        """ Log Into Youtube, Copied from Youtube Data API """
        # Disable OAuthlib's HTTPS verification when running locally.
        # *DO NOT* leave this option enabled in production.
        os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
        api_service_name = "youtube"
        api_version = "v3"
        client_secrets_file = "client_secret.json"
        # Get credentials and create an API client
        scopes = ["https://www.googleapis.com/auth/youtube.readonly"]
        flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file(
            client_secrets_file, scopes)
        credentials = flow.run_console()
        # from the Youtube DATA API
        youtube_client = googleapiclient.discovery.build(
            api_service_name, api_version, credentials=credentials)
        return youtube_client
    def get_liked_videos(self):
        """Grab Our Liked Videos & Create A Dictionary Of Important Song Information"""
        request = self.youtube_client.videos().list(
            part="snippet,contentDetails,statistics",
            myRating="like"
        )
        response = request.execute()
        # collect each video and get important information
        for item in response["items"]:
            video_title = item["snippet"]["title"]
            youtube_url = "https://www.youtube.com/watch?v={}".format(
                item["id"])
            # use youtube_dl to collect the song name & artist name
            video = youtube_dl.YoutubeDL({}).extract_info(
                youtube_url, download=False)
            song_name = video["track"]
            artist = video["artist"]
            if song_name is not None and artist is not None:
                # save all important info and skip any missing song and artist
                self.all_song_info[video_title] = {
                    "youtube_url": youtube_url,
                    "song_name": song_name,
                    "artist": artist,
                    # add the uri, easy to get song to put into playlist
                    "spotify_uri": self.get_spotify_uri(song_name, artist)
                }
    def create_playlist(self):
        """Create A New Playlist"""
        request_body = json.dumps({
            "name": "Youtube Liked Vids",
            "description": "All Liked Youtube Videos",
            "public": True
        })
        query = "https://api.spotify.com/v1/users/{}/playlists".format(
            spotify_user_id)
        response = requests.post(
            query,
            data=request_body,
            headers={
                "Content-Type": "application/json",
                "Authorization": "Bearer {}".format(spotify_token)
            }
        )
        # playlist id
        try:
            response_json = response.json()
        except Exception:
            return None
        return response_json.get('id', None)
    def get_spotify_uri(self, song_name, artist):
        """Search For the Song"""
        query = "https://api.spotify.com/v1/search?query=track%3A{}+artist%3A{}&type=track&offset=0&limit=20".format(
            song_name,
            artist
        )
        response = requests.get(
            query,
            headers={
                "Content-Type": "application/json",
                "Authorization": "Bearer {}".format(spotify_token)
            }
        )
        response_json = response.json()
        songs = response_json["tracks"]["items"]
        # only use the first song
        uri = songs[0]["uri"]
        return uri
    def add_song_to_playlist(self):
        """Add all liked songs into a new Spotify playlist"""
        # populate dictionary with our liked songs
        self.get_liked_videos()
        # collect all of uri
        uris = [info["spotify_uri"]
                for song, info in self.all_song_info.items()]
        # create a new playlist
        playlist_id = self.create_playlist()
        # add all songs into new playlist
        request_data = json.dumps(uris)
        query = "https://api.spotify.com/v1/playlists/{}/tracks".format(
            playlist_id)
        response = requests.post(
            query,
            data=request_data,
            headers={
                "Content-Type": "application/json",
                "Authorization": "Bearer {}".format(spotify_token)
            }
        )
        # check for valid response status
        if response.status_code != 200:
            raise ResponseException(response.status_code)
        response_json = response.json()
        return response_json


if __name__ == '__main__':
    cp = CreatePlaylist()
    cp.add_song_to_playlist()
This exception occurs when you try to read a key that is not in the dict using []. To handle it gracefully, use .get('id', None) instead:
try:
    response_json = response.json()
except Exception:
    return None
return response_json.get('id', None)
It will return None if there is no value for the key 'id'; you can replace None with any fallback value.
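To understand why 'id' is missing in the first place, it can also help to inspect the Spotify response before indexing into it. A small illustrative sketch (the status check and error print are my own diagnostics, not part of the original code, and query/request_body/headers are assumed to be defined as in create_playlist):

response = requests.post(query, data=request_body, headers=headers)

# Illustrative diagnostic: on failure Spotify usually returns an "error" object
# instead of a playlist, so inspect the body before reaching for "id".
if response.status_code not in (200, 201):
    print("Playlist creation failed:", response.status_code, response.text)
    playlist_id = None
else:
    playlist_id = response.json().get("id")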
I am using this API to get user emails for every member of a team. The teams are taken from a mongo database. I want to get this API to run for every team in the database and to return a list of emails for every member of every team. Right now, the API is only retrieving emails for members of the first team in the database.
import json

import requests
from pymongo import MongoClient

with open('config/config.json') as f:
    config = json.load(f)

API_KEY = config['API_KEY']  # loads API key

# Database info
client = MongoClient(config['HOST'], config['PORT'])
db = client[config['DATABASE']]
teams = list(db.teams.find())  # gets all the teams from the database


def list_users():
    url = 'https://api.pagerduty.com/users'
    headers = {
        'Accept': 'application/vnd.pagerduty+json;version=2',
        'Authorization': 'Token token={token}'.format(token=API_KEY)
    }
    result = []
    for team in teams:
        payload = {
            'team_ids[]': team['team_id'],
            'limit': 100,
            'offset': 0
        }
        r = requests.get(url, headers=headers, params=payload)
        users = r.json()['users']
        emails = []
        # loops for each user and retrieves their email
        for user in users:
            if r.status_code == 200:
                emails.append(user['email'])
            else:
                return None
        return emails  # this returns inside the loop, after only the first team
    return result
How can I get this to run for every team from the database?
It looks like you're prematurely returning from your loop on teams. If you collect the emails in the result list instead, your issue is resolved.
def list_users():
    url = 'https://api.pagerduty.com/users'
    headers = {
        'Accept': 'application/vnd.pagerduty+json;version=2',
        'Authorization': 'Token token={token}'.format(token=API_KEY)
    }
    result = []
    for team in teams:
        payload = {
            'team_ids[]': team['team_id'],
            'limit': 100,
            'offset': 0
        }
        r = requests.get(url, headers=headers, params=payload)
        users = r.json()['users']
        emails = []
        # loops for each user and retrieves their email
        for user in users:
            if r.status_code == 200:
                emails.append(user['email'])
        result.append(emails)
    return result
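If you would rather end up with a single flat list of emails across all teams, a small variation is to flatten the per-team lists afterwards; the flattening step below is a suggestion on top of the answer above, not part of it:

# list_users() returns one list of emails per team, e.g.
# [['a@example.com', 'b@example.com'], ['c@example.com']]
emails_per_team = list_users()

# Optional: flatten into a single list of emails across all teams.
all_emails = [email for team_emails in emails_per_team for email in team_emails]
print(all_emails)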
I'm working on a project with Python (3.6) and Django (1.10) in which I need to create a function on Google Cloud using an API request.
How can I upload the code in the form of a zip archive while creating that function?
Here's what I have tried:
From views.py:
def post(self, request, *args, **kwargs):
    if request.method == 'POST':
        post_data = request.POST.copy()
        post_data.update({'user': request.user.pk})
        form = forms.SlsForm(post_data, request.FILES)
        print('get post request')
        if form.is_valid():
            func_obj = form
            func_obj.user = request.user
            func_obj.project = form.cleaned_data['project']
            func_obj.fname = form.cleaned_data['fname']
            func_obj.fmemory = form.cleaned_data['fmemory']
            func_obj.entryPoint = form.cleaned_data['entryPoint']
            func_obj.sourceFile = form.cleaned_data['sourceFile']
            func_obj.sc_github = form.cleaned_data['sc_github']
            func_obj.sc_inline_index = form.cleaned_data['sc_inline_index']
            func_obj.sc_inline_package = form.cleaned_data['sc_inline_package']
            func_obj.bucket = form.cleaned_data['bucket']
            func_obj.save()
            service = discovery.build('cloudfunctions', 'v1', http=views.getauth(), cache_discovery=False)
            requ = service.projects().locations().functions().generateUploadUrl(
                parent='projects/' + func_obj.project + '/locations/us-central1', body={})
            resp = requ.execute()
            print(resp)
            try:
                auth = views.getauth()
                # Prepare Request Body
                req_body = {
                    "CloudFunction": {
                        "name": func_obj.fname,
                        "entryPoint": func_obj.entryPoint,
                        "timeout": '60s',
                        "availableMemoryMb": func_obj.fmemory,
                        "sourceArchiveUrl": func_obj.sc_github,
                    },
                    "sourceUploadUrl": func_obj.bucket,
                }
                service = discovery.build('cloudfunctions', 'v1beta2', http=auth, cache_discovery=False)
                func_req = service.projects().locations().functions().create(
                    location='projects/' + func_obj.project + '/locations/-',
                    body=req_body)
                func_res = func_req.execute()
                print(func_res)
                return HttpResponse('Submitted',)
            except:
                return HttpResponse(status=500)
    return HttpResponse('Sent!')
Updated Code below:
if form.is_valid():
    func_obj = form
    func_obj.user = request.user
    func_obj.project = form.cleaned_data['project']
    func_obj.fname = form.cleaned_data['fname']
    func_obj.fmemory = form.cleaned_data['fmemory']
    func_obj.entryPoint = form.cleaned_data['entryPoint']
    func_obj.sourceFile = form.cleaned_data['sourceFile']
    func_obj.sc_github = form.cleaned_data['sc_github']
    func_obj.sc_inline_index = form.cleaned_data['sc_inline_index']
    func_obj.sc_inline_package = form.cleaned_data['sc_inline_package']
    func_obj.bucket = form.cleaned_data['bucket']
    func_obj.save()

    #######################################################################
    # FIRST APPROACH FOR FUNCTION CREATION USING STORAGE BUCKET
    #######################################################################
    file_name = os.path.join(IGui.settings.BASE_DIR, 'media/archives/', func_obj.sourceFile.name)
    print(file_name)
    service = discovery.build('cloudfunctions', 'v1')
    func_api = service.projects().locations().functions()
    url_svc_req = func_api.generateUploadUrl(parent='projects/'
                                                    + func_obj.project
                                                    + '/locations/us-central1',
                                             body={})
    url_svc_res = url_svc_req.execute()
    print(url_svc_res)
    upload_url = url_svc_res['uploadUrl']
    print(upload_url)
    headers = {
        'content-type': 'application/zip',
        'x-goog-content-length-range': '0,104857600'
    }
    print(requests.put(upload_url, headers=headers, data=func_obj.sourceFile.name))
    auth = views.getauth()
    # Prepare Request Body
    name = "projects/{}/locations/us-central1/functions/{}".format(func_obj.project, func_obj.fname,)
    print(name)
    req_body = {
        "name": name,
        "entryPoint": func_obj.entryPoint,
        "timeout": "3.5s",
        "availableMemoryMb": func_obj.fmemory,
        "sourceUploadUrl": upload_url,
        "httpsTrigger": {},
    }
    service = discovery.build('cloudfunctions', 'v1')
    func_api = service.projects().locations().functions()
    response = func_api.create(location='projects/' + func_obj.project + '/locations/us-central1',
                               body=req_body).execute()
    pprint.pprint(response)
Now the function is created successfully, but it fails because the source code isn't uploaded to the storage bucket; maybe something is wrong here:
upload_url = url_svc_res['uploadUrl']
print(upload_url)
headers = {
    'content-type': 'application/zip',
    'x-goog-content-length-range': '0,104857600'
}
print(requests.put(upload_url, headers=headers, data=func_obj.sourceFile.name))
In the request body you have a "CloudFunction" dictionary nested inside the request. The contents of "CloudFunction" should go directly in the request body:
request_body = {
    "name": parent + '/functions/' + name,
    "entryPoint": entry_point,
    "sourceUploadUrl": upload_url,
    "httpsTrigger": {}
}
I recommend using "Try this API" to discover the structure of projects.locations.functions.create.
"sourceArchiveUrl" and "sourceUploadUrl" can't appear together. This is explained in the CloudFunction resource documentation:
// Union field source_code can be only one of the following:
"sourceArchiveUrl": string,
"sourceRepository": { object(SourceRepository) },
"sourceUploadUrl": string,
// End of list of possible types for union field source_code.
In the rest of the answer I assume that you want to use "sourceUploadUrl". It requires you to pass it a URL returned to you by .generateUploadUrl(...).execute(). See documentation:
sourceUploadUrl -> string
The Google Cloud Storage signed URL used for source uploading,
generated by [google.cloud.functions.v1.GenerateUploadUrl][]
But before passing it you need to upload a zip file to this URL:
curl -X PUT "${URL}" -H 'content-type:application/zip' -H 'x-goog-content-length-range: 0,104857600' -T test.zip
or in python:
headers = {
    'content-type': 'application/zip',
    'x-goog-content-length-range': '0,104857600'
}
print(requests.put(upload_url, headers=headers, data=data))
This is the trickiest part:
The case of the header names matters and should be lowercase, because the signature is calculated from a hash (here).
You need 'content-type':'application/zip'. I deduced this one logically, because the documentation doesn't mention it (here).
x-goog-content-length-range: min,max is obligatory for all PUT requests to Cloud Storage and is assumed implicitly in this case (more on it here).
104857600, the max in the previous entry, is a magic number which I haven't found mentioned anywhere.
Here data is a file-like object.
I also assume that you want to use httpsTrigger. For a Cloud Function you can only choose one trigger field; here it's said that the trigger is a union field. For httpsTrigger, however, you can just leave it as an empty dictionary, as its contents do not affect the outcome (as of now).
request_body = {
    "name": parent + '/functions/' + name,
    "entryPoint": entry_point,
    "sourceUploadUrl": upload_url,
    "httpsTrigger": {}
}
You can safely use 'v1' instead of 'v1beta2' for .create().
Here is a full working example. It would be too complicated if I presented it as part of your code, but you can easily integrate it.
import pprint
import zipfile
import requests
from tempfile import TemporaryFile
from googleapiclient import discovery

project_id = 'your_project_id'
region = 'us-central1'
parent = 'projects/{}/locations/{}'.format(project_id, region)
print(parent)
name = 'ExampleFunctionFibonacci'
entry_point = "fibonacci"

service = discovery.build('cloudfunctions', 'v1')
CloudFunctionsAPI = service.projects().locations().functions()
upload_url = CloudFunctionsAPI.generateUploadUrl(parent=parent, body={}).execute()['uploadUrl']
print(upload_url)

payload = """/**
 * Responds to any HTTP request that can provide a "message" field in the body.
 *
 * @param {Object} req Cloud Function request context.
 * @param {Object} res Cloud Function response context.
 */
exports.""" + entry_point + """= function """ + entry_point + """ (req, res) {
  if (req.body.message === undefined) {
    // This is an error case, as "message" is required
    res.status(400).send('No message defined!');
  } else {
    // Everything is ok
    console.log(req.body.message);
    res.status(200).end();
  }
};"""

with TemporaryFile() as data:
    with zipfile.ZipFile(data, 'w', zipfile.ZIP_DEFLATED) as archive:
        archive.writestr('function.js', payload)
    data.seek(0)
    headers = {
        'content-type': 'application/zip',
        'x-goog-content-length-range': '0,104857600'
    }
    print(requests.put(upload_url, headers=headers, data=data))

# Prepare Request Body
# https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions#resource-cloudfunction
request_body = {
    "name": parent + '/functions/' + name,
    "entryPoint": entry_point,
    "sourceUploadUrl": upload_url,
    "httpsTrigger": {},
    "runtime": 'nodejs8'
}

print('https://{}-{}.cloudfunctions.net/{}'.format(region, project_id, name))
response = CloudFunctionsAPI.create(location=parent, body=request_body).execute()
pprint.pprint(response)
Open and upload the zip file like the following:
file_name = os.path.join(IGui.settings.BASE_DIR, 'media/archives/', func_obj.sourceFile.name)
headers = {
    'content-type': 'application/zip',
    'x-goog-content-length-range': '0,104857600'
}
with open(file_name, 'rb') as data:
    print(requests.put(upload_url, headers=headers, data=data))