I have set up an AWS Pinpoint project and I'm trying to test it by sending an event from a Lambda function:
import boto3
import datetime
import time

client = boto3.client('pinpoint')
app_id = '1234abcd'
endpoint_id = 'test_endpoint'
address = 'test@test.com'

def lambda_handler(event, context):
    response = client.put_events(
        ApplicationId=app_id,
        EventsRequest={
            'BatchItem': {
                endpoint_id: {
                    'Endpoint': {
                        'ChannelType': 'EMAIL',
                        'Address': address,
                        'Attributes': {
                            'Cart': ['Hat'],
                            'Purchased': ['No']
                        }
                    },
                    'Events': {
                        'cart-event-2': {
                            'Attributes': {
                                'AddedToCart': 'Hat'
                            },
                            'EventType': 'AddToCartEvent',
                            'Metrics': {
                                'price': 29.95
                            },
                            'Timestamp': datetime.datetime.fromtimestamp(time.time()).isoformat()
                        }
                    }
                }
            }
        }
    )
    return response
But I am receiving an error that the resource cannot be found, even though I can see it in the Pinpoint console:
{
  "errorMessage": "An error occurred (NotFoundException) when calling the PutEvents operation: Resource not found",
  "errorType": "NotFoundException",
  "requestId": "xxxxx-xxxxx-xxxx-xxxx-xxxxxxxxx",
  "stackTrace": [
    " File \"/var/task/lambda_function.py\", line 12, in lambda_handler\n response = client.put_events(\n",
    " File \"/var/runtime/botocore/client.py\", line 391, in _api_call\n return self._make_api_call(operation_name, kwargs)\n",
    " File \"/var/runtime/botocore/client.py\", line 719, in _make_api_call\n raise error_class(parsed_response, operation_name)\n"
  ]
}
Turns out I was just in the wrong region on my AWS account.
I created the AWS Pinpoint project in one region but was trying to send events to the project from another AWS region, which was why I was getting the NotFoundException.
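For anyone hitting the same thing, a minimal sketch of pinning the client to the project's region (us-east-1 here is only an assumption; use whatever region your Pinpoint console shows):

import boto3

# Create the client in the region that actually hosts the Pinpoint project,
# instead of relying on the Lambda function's default region.
client = boto3.client('pinpoint', region_name='us-east-1')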
Related
I'm using the urllib3 library in a Lambda function (Python 3) that fetches the MS Teams webhook URL from AWS Secrets Manager and sends an HTTP POST request to publish a notification.
My webhook URL starts with https and looks like this: "https://outlook.office.com/webhook/...". On executing the Lambda function, I get a LocationParseError (Failed to parse), shown below.
Code
import os
import json
import base64
import boto3
import urllib3

http = urllib3.PoolManager()

def get_secret(secret_name, region_name):
    # Create a Secrets Manager client
    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager',
        region_name=region_name
    )
    get_secret_value_response = client.get_secret_value(
        SecretId=secret_name,
        VersionStage="AWSCURRENT"
    )
    if 'SecretString' in get_secret_value_response:
        secret = get_secret_value_response['SecretString']
        return secret
    else:
        decoded_binary_secret = base64.b64decode(get_secret_value_response['SecretBinary'])
        return decoded_binary_secret

MSTEAMS_WEBHOOK_SECRET_NAME = os.getenv('MSTEAMS_WEBHOOK_SECRET_NAME')
HOOK_URL = get_secret(MSTEAMS_WEBHOOK_SECRET_NAME, "eu-west-1")

def lambda_handler(event, context):
    # data, title and account_id come from code elided in the original question
    message = {
        "@context": "https://schema.org/extensions",
        "@type": "MessageCard",
        "themeColor": data["colour"],
        "title": title,
        "text": "accountId:\n" + account_id + " <br/>\n"
    }
    webhook_encoded_body = json.dumps(message).encode('utf-8')
    response = http.request('POST', HOOK_URL, body=webhook_encoded_body)
errorMessage
{
  "errorMessage": "Failed to parse: {\"msteams-secret\":\"https://outlook.office.com/webhook/dxxxxxx@d779xxxxx-xxxxxx/IncomingWebhook/axxxxxx5/ca746326-bxxx-4xxx-8x-xxxxx\"}",
  "errorType": "LocationParseError",
  "stackTrace": [
    [
      "/var/task/lambda_function.py",
      145,
      "lambda_handler",
      "resp = http.request('POST',HOOK_URL, body=webhook_encoded_body)"
    ],
    [
      "/var/runtime/urllib3/request.py",
      80,
      "request",
      "method, url, fields=fields, headers=headers, **urlopen_kw"
    ],
    [
      "/var/runtime/urllib3/request.py",
      171,
      "request_encode_body",
      "return self.urlopen(method, url, **extra_kw)"
    ],
    [
      "/var/runtime/urllib3/poolmanager.py",
      324,
      "urlopen",
      "u = parse_url(url)"
    ],
    [
      "/var/runtime/urllib3/util/url.py",
      392,
      "parse_url",
      "return six.raise_from(LocationParseError(source_url), None)"
    ],
    [
      "<string>",
      3,
      "raise_from",
      ""
    ]
  ]
}
Here is how I solved it:
Deployed the Lambda zip file again, with the correct dependencies (requests, urllib3) in the same folder.
It turned out I was storing the secret as a key/value pair in AWS Secrets Manager, so urllib3 was handed a JSON dictionary instead of a bare URL and could not parse it. I changed the secret type to plaintext.
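Alternatively, if the secret has to stay as a key/value pair, a minimal sketch (assuming the key is msteams-secret, as the error message shows) of parsing the JSON SecretString before posting:

import json

# For a key/value secret, SecretString is a JSON document such as
# '{"msteams-secret": "https://outlook.office.com/webhook/..."}'
secret_string = get_secret(MSTEAMS_WEBHOOK_SECRET_NAME, "eu-west-1")
HOOK_URL = json.loads(secret_string)["msteams-secret"]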
I have been getting this error for days and I'm unable to sort out what the issue is in this code:
"errorMessage": "Parameter validation failed:\nInvalid type for parameter Dimensions[0].Value, value: {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'}, type: <class 'dict'>, valid types: <class 'str'>",
"errorType": "ParamValidationError",
"stackTrace": [
" File \"/var/task/lambda_function.py\", line 26, in bucket_size\n Unit='Bytes'\n",
" File \"/var/runtime/botocore/client.py\", line 320, in _api_call\n return self._make_api_call(operation_name, kwargs)\n",
" File \"/var/runtime/botocore/client.py\", line 596, in _make_api_call\n api_params, operation_model, context=request_context)\n",
" File \"/var/runtime/botocore/client.py\", line 632, in _convert_to_request_dict\n api_params, operation_model)\n",
" File \"/var/runtime/botocore/validate.py\", line 291, in serialize_to_request\n raise ParamValidationError(report=report.generate_report())\n"
]
My Python 3.7 code:
import boto3
from datetime import datetime, timedelta
import json

def bucket_size(a, b):
    bucket_name = a
    cloudwatch = boto3.client('cloudwatch', region_name='ap-southeast-1')
    response = cloudwatch.get_metric_statistics(
        Namespace="AWS/S3",
        MetricName="BucketSizeBytes",
        Dimensions=[
            {
                'Name': 'BucketName',
                'Value': bucket_name
            },
            {
                'Name': 'StorageType',
                'Value': 'StandardStorage'
            }
        ],
        Statistics=['Average'],
        Period=86400,
        StartTime=datetime.now() - timedelta(days=10),
        EndTime=datetime.now() - timedelta(days=2),
        Unit='Bytes'
    )
I am trying to get the metric from S3 and pipe it to a .csv file in a specific S3 bucket, but I encounter this error on Lambda (Python 3.7).
Any help appreciated; I've opened a lot of tabs trying to find an answer online with no luck. Thanks! Cheers
Not sure, but I think the problem is in
Dimensions=[
    {
        'Name': 'BucketName',
        'Value': bucket_name
    },
    {
        'Name': 'StorageType',
        'Value': 'StandardStorage'
    }
]
The error message spells it out: Dimensions[0].Value must be a string, but it received a dict ({'key1': 'value1', ...}). In other words, the a argument you pass into bucket_size (which becomes bucket_name) is a dict rather than a single bucket-name string.
I am trying to create/delete snapshots of my AWS RDS database through an AWS Lambda function (Python 3.6), but I don't know where I am going wrong. The function should also delete snapshots that are more than 7 days old. Can anyone advise me on this?
import boto3
import datetime

def lambda_handler(event, context):
    print("Connecting to RDS")
    client = boto3.client('rds')
    dbInstances = ['magento-live']
    for dbInstance in dbInstances:
        print("RDS snapshot backups started at %s...\n" % datetime.datetime.now())
        client.create_db_snapshot(
            DBInstanceIdentifier=dbInstance,
            DBSnapshotIdentifier=dbInstance + '{}'.format(datetime.datetime.now().strftime("%y-%m-%d-%H")),
            Tags=[
                {
                    'Key': 'NI',
                    'Value': 'NIRDS'
                },
            ]
        )
        for snapshot in client.describe_db_snapshots(DBInstanceIdentifier=dbInstance, MaxRecords=50)['DBSnapshots']:
            createTs = snapshot['SnapshotCreateTime'].replace(tzinfo=None)
            if createTs < datetime.datetime.now() - datetime.timedelta(days=7):
                print("Deleting snapshot id:", snapshot['DBSnapshotIdentifier'])
                client.delete_db_snapshot(
                    DBSnapshotIdentifier=snapshot['DBSnapshotIdentifier']
                )
But I always get the error below:
{
  "errorMessage": "An error occurred (InvalidParameterValue) when calling the CreateDBSnapshot operation: The specified instance is a member of a cluster and a snapshot cannot be created directly. Please use the CreateDBClusterSnapshot API instead.",
  "errorType": "ClientError",
  "stackTrace": [
    [
      "/var/task/lambda_function.py",
      19,
      "lambda_handler",
      "'Value': 'NIRDS'"
    ],
    [
      "/var/runtime/botocore/client.py",
      312,
      "_api_call",
      "return self._make_api_call(operation_name, kwargs)"
    ],
    [
      "/var/runtime/botocore/client.py",
      605,
      "_make_api_call",
      "raise error_class(parsed_response, operation_name)"
    ]
  ]
}
Aron is right. You have to use a different method, one that applies to an Aurora cluster rather than a DB instance.
Here is a Lambda function that worked for me:
import boto3
import datetime

def lambda_handler(event, context):
    print("Connecting to RDS")
    client = boto3.client('rds')
    print("RDS snapshot backups started at %s...\n" % datetime.datetime.now())
    client.create_db_cluster_snapshot(
        DBClusterIdentifier='enter-your-cluster-name-here',
        DBClusterSnapshotIdentifier='enter-your-cluster-name-here-%s' % datetime.datetime.now().strftime("%y-%m-%d-%H"),
        Tags=[
            {
                'Key': 'ENV',
                'Value': 'dev'
            },
        ]
    )
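The question also asked about the 7-day cleanup. A sketch of the cluster equivalent of the original delete loop, appended inside the same lambda_handler (same cluster-name placeholder; it filters to manual snapshots because automated ones cannot be deleted directly):

    # List manual cluster snapshots and delete anything older than 7 days.
    cutoff = datetime.datetime.now() - datetime.timedelta(days=7)
    snapshots = client.describe_db_cluster_snapshots(
        DBClusterIdentifier='enter-your-cluster-name-here',
        SnapshotType='manual'
    )['DBClusterSnapshots']
    for snapshot in snapshots:
        created = snapshot['SnapshotCreateTime'].replace(tzinfo=None)
        if created < cutoff:
            print("Deleting snapshot id:", snapshot['DBClusterSnapshotIdentifier'])
            client.delete_db_cluster_snapshot(
                DBClusterSnapshotIdentifier=snapshot['DBClusterSnapshotIdentifier']
            )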
I tried using the basic example from the AWS documentation, starting DynamoDB Local as follows.
java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb
And it produces the output
Initializing DynamoDB Local with the following configuration:
Port: 8000
InMemory: false
DbPath: null
SharedDb: true
shouldDelayTransientStatuses: false
CorsParams: *
The example Python code is as follows.
from __future__ import print_function  # Python 2/3 compatibility
import boto3

dynamodb = boto3.resource('dynamodb', region_name='us-west-2', endpoint_url='http://localhost:8000')

table = dynamodb.create_table(
    TableName='Movies',
    KeySchema=[
        {
            'AttributeName': 'year',
            'KeyType': 'HASH'  # Partition key
        },
        {
            'AttributeName': 'title',
            'KeyType': 'RANGE'  # Sort key
        }
    ],
    AttributeDefinitions=[
        {
            'AttributeName': 'year',
            'AttributeType': 'N'
        },
        {
            'AttributeName': 'title',
            'AttributeType': 'S'
        },
    ],
    ProvisionedThroughput={
        'ReadCapacityUnits': 10,
        'WriteCapacityUnits': 10
    }
)

print("Table status:", table.table_status)
But I get this output:
Traceback (most recent call last):
File "test.py", line 32, in <module>
'WriteCapacityUnits': 10
File "/home/janga/anaconda2/lib/python2.7/site-packages/boto3/resources/factory.py", line 520, in do_action
response = action(self, *args, **kwargs)
File "/home/janga/anaconda2/lib/python2.7/site-packages/boto3/resources/action.py", line 83, in __call__
response = getattr(parent.meta.client, operation_name)(**params)
File "/home/janga/.local/lib/python2.7/site-packages/botocore/client.py", line 251, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/home/janga/.local/lib/python2.7/site-packages/botocore/client.py", line 537, in _make_api_call
raise ClientError(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (503) when calling the CreateTable operation (reached max retries: 9): <HTML><TITLE>503 Service Unavailable</TITLE>
<H1>503 Service Unavailable</H1>
Failed to connect to server <B>localhost</B></HTML>
I couldn't find any solution anywhere for this issue.
The only reason I can think of is that I am on a university network which runs behind a proxy server, but I can't find any solution for that either.
Please help.
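If the proxy is indeed the culprit, one thing worth trying (a sketch, not a confirmed fix): botocore honors the standard proxy environment variables, so excluding localhost from proxying before creating the resource may let the request through.

import os
import boto3

# Keep localhost traffic away from the campus proxy; boto3/botocore
# respect the NO_PROXY environment variable.
os.environ['NO_PROXY'] = 'localhost,127.0.0.1'

dynamodb = boto3.resource('dynamodb', region_name='us-west-2',
                          endpoint_url='http://localhost:8000')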
I'm trying to post files to my GraphQL endpoint at scaphold.io. I normally write JavaScript, but I have to translate this particular operation into Python.
The command to do so with curl looks like this (from their docs):
curl -v https://us-west-2.api.scaphold.io/graphql/scaphold-graphql \
-H "Content-Type:multipart/form-data" \
-F 'query=mutation CreateFile($input: CreateFileInput!) { createFile(input: $input) { changedFile { id name blobMimeType blobUrl user { id username } } } }' \
-F 'variables={ "input": { "name": "Profile Picture", "userId": "VXNlcjoxMA==", "blobFieldName": "myBlobField" } };type=application/json' \
-F myBlobField=@mark-zuckerberg.jpg
Where the blobFieldName prop in variables matches the Form field name that holds the file to upload.
Using Requests, I've gotten this far:
import requests
from requests_toolbelt import MultipartEncoder

url = 'https://us-west-2.api.scaphold.io/graphql/scaphold-graphql'

multipart_data = MultipartEncoder(
    fields={
        "query": "mutation CreateFile($input: CreateFileInput!) { createFile(input: $input) { changedFile { id name blobMimeType blobUrl user { id username } } } }",
        "variables": { "input": { "name": "Profile Picture", "userId": "VXNlcjoxMA==", "blobFieldName": "myBlobField" } },
        "type": 'application/json',
        "myBlobField": ('example.jpg', open('example.jpg', 'rb'), 'image/jpeg')
    }
)

req_headers = {'Content-Type': multipart_data.content_type, 'Authorization': 'Bearer myreallylongkey'}
r = requests.post(url, data=multipart_data, headers=req_headers)
Unfortunately this is met with the AttributeError:
Traceback (most recent call last):
File "test-gql.py", line 38, in <module>
"myBlobField": ('example.jpg', open('example.jpg', 'rb'), 'image/jpeg' )
File "/home/bmp/code/wayhome/python-phash/requests_toolbelt/multipart/encoder.py", line 119, in __init__
self._prepare_parts()
File "/home/bmp/code/wayhome/python-phash/requests_toolbelt/multipart/encoder.py", line 240, in _prepare_parts
self.parts = [Part.from_field(f, enc) for f in self._iter_fields()]
File "/home/bmp/code/wayhome/python-phash/requests_toolbelt/multipart/encoder.py", line 488, in from_field
body = coerce_data(field.data, encoding)
File "/home/bmp/code/wayhome/python-phash/requests_toolbelt/multipart/encoder.py", line 466, in coerce_data
return CustomBytesIO(data, encoding)
File "/home/bmp/code/wayhome/python-phash/requests_toolbelt/multipart/encoder.py", line 529, in __init__
buffer = encode_with(buffer, encoding)
File "/home/bmp/code/wayhome/python-phash/requests_toolbelt/multipart/encoder.py", line 410, in encode_with
return string.encode(encoding)
AttributeError: 'dict' object has no attribute 'encode'
I'm afraid I'm not Pythonic enough to grok this error, but I've eliminated a few suspects:
Translating a simple query from curl to Python works fine, so I know it isn't permissions, etc.
Using a different file, of the plain/text flavor, fails w/ the same error
Using a tuple-of-tuples instead of a dict for fields (as described here) has no effect
From your example, it seems that you are passing "variables" as a dictionary, but it should be a string. Change
"variables": { "input": { "name": "Profile Picture", "userId": "VXNlcjoxMA==", "blobFieldName": "myBlobField" } },
to
"variables": '{ "input": { "name": "Profile Picture", "userId": "VXNlcjoxMA==", "blobFieldName": "myBlobField" } }',
Note the use of single quotes to make it a string.
EDIT: Looking at the code of MultipartEncoder, it calls the .encode(...) method on each field value. .encode(...) is a string method; because the value of the "variables" key is a dict, .encode(...) fails on it.
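A sketch of the same fix that avoids hand-writing JSON (the rest of the call stays as in the question): serialize the dict with json.dumps so MultipartEncoder receives a string.

import json
from requests_toolbelt import MultipartEncoder

variables = {"input": {"name": "Profile Picture", "userId": "VXNlcjoxMA==", "blobFieldName": "myBlobField"}}

multipart_data = MultipartEncoder(
    fields={
        "query": "mutation CreateFile($input: CreateFileInput!) { createFile(input: $input) { changedFile { id name blobMimeType blobUrl user { id username } } } }",
        "variables": json.dumps(variables),  # now a str, so .encode() succeeds
        "type": 'application/json',
        "myBlobField": ('example.jpg', open('example.jpg', 'rb'), 'image/jpeg')
    }
)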