SNS email notification based on lambda cloudwatch log with custom metrics - python

I have written a Python script to send instance information over email on a cron schedule and to populate custom metrics as well. With the following code I can see all the logs in the CloudWatch Logs console. However, the dimension never gets created under the CloudWatch metrics section, and no mail is triggered either.
import boto3
import json
import logging
from datetime import datetime
# Module-level logger; the Lambda runtime attaches a handler, so setting the
# level is all that is needed here.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def post_metric(example_namespace, example_dimension_name, example_metric_name, example_dimension_value, example_metric_value):
    """Publish a single custom metric data point to CloudWatch.

    Args:
        example_namespace: CloudWatch namespace to publish under.
        example_dimension_name: Name of the single dimension on the data point.
        example_metric_name: Metric name within the namespace.
        example_dimension_value: Value for the dimension.
        example_metric_value: Sample value; coerced to int before sending.
    """
    cw_client = boto3.client("cloudwatch")
    cw_client.put_metric_data(
        Namespace=example_namespace,
        MetricData=[
            {
                'MetricName': example_metric_name,
                'Dimensions': [
                    {
                        'Name': example_dimension_name,
                        'Value': example_dimension_value
                    },
                ],
                # BUG FIX: the module does `from datetime import datetime`, so
                # `datetime.datetime.now()` raised AttributeError at runtime and
                # the data point (and its dimension) was never created -- the
                # exact symptom described. `datetime.now()` is correct here.
                'Timestamp': datetime.now(),
                'Value': int(example_metric_value)
            },
        ]
    )
def lambda_handler(event, context):
    """Find jenkins-slave-* EC2 instances, emit one custom metric per instance,
    and publish each instance id to an SNS topic.

    Args:
        event: Lambda trigger payload (only logged).
        context: Lambda context object (unused).
    """
    logger.info(event)
    ec2_client = boto3.client("ec2")
    sns_client = boto3.client("sns")
    # Hard-coded for testing; prefer an environment variable in production.
    SNS_TOPIC_ARN = 'arn:aws:sns:us-east-1:1234567890:example-instance-alarms'
    reservations = ec2_client.describe_instances(
        Filters=[
            {
                'Name': 'tag:Name',
                'Values': [
                    'jenkins-slave-*'
                ]
            }
        ]
    )['Reservations']
    for reservation in reservations:
        for instance in reservation["Instances"]:
            my_instance_id = instance['InstanceId']
            my_instance_state = instance['State']['Name']
            my_instance = {
                'InstanceId': my_instance_id,
                'InstanceState': my_instance_state,
            }
            # BUG FIX: the original line was missing its closing parenthesis,
            # a SyntaxError that prevented the whole function from running.
            logger.info(json.dumps(my_instance))
            post_metric("Jenkins", "ciname", "orphaned-slaves", my_instance_id, 1)
            # Send message to SNS (testing purpose). Note: put_metric_data by
            # itself never sends mail -- a CloudWatch alarm (put_metric_alarm)
            # on this metric is what would normally trigger SNS.
            sns_client.publish(
                TopicArn=SNS_TOPIC_ARN,
                Subject='Instance Info: ' + my_instance_id,
                Message='Instance id: ' + my_instance_id
            )
Can anyone please help me figure out what I am missing here? Thanks in advance.

You forgot to create an alarm on your metric: fields such as EvaluationPeriods, AlarmName, etc. belong to put_metric_alarm (not put_metric_data), and it is the alarm that actually triggers the SNS notification — see the documentation.
You can use this for an example.

Related

Facebook Business API Custom Audience delete_users function not working

I'm trying to remove users from an audience in the Facebook Ads API, using the delete_users function.
This is my code so far:
'''
from collections import UserList
from facebook_business.adobjects.adaccount import AdAccount
from facebook_business.adobjects.customaudience import CustomAudience
from facebook_business.api import FacebookAdsApi
from facebook_business.api import FacebookRequest

access_token = 'XXX'
id = 'XXX'  # NOTE(review): shadows the builtin `id`; rename (e.g. account_id) when convenient
api = FacebookAdsApi.init(access_token=access_token)

# Create the audience and keep the returned object so we know its id.
fields = []
params = {
    'name': 'Test Audience',
    'subtype': 'CUSTOM',
    'description': 'People who purchased on my website',
    'customer_file_source': 'USER_PROVIDED_ONLY',
}
audience = AdAccount(id).create_custom_audience(
    fields=fields,
    params=params,
)
print(audience)

fields = []
params = {
    'name': 'Test Audience',
    'payload': hashed_list_of_emails  # NOTE(review): must be defined (SHA-256 hashed) upstream
}
# BUG FIX: delete_users is an instance method, so it must be called on a
# CustomAudience constructed from the audience id -- not on the class with a
# bare `self` (which was also missing a trailing comma, a SyntaxError).
CustomAudience(audience['id']).delete_users(
    fields=fields,
    params=params,
    method='DELETE',
    endpoint='/users',
)
'''
I am receiving an error which tells me to replace self with node_id=self['id'], but I am unclear on what this is. I am also unclear on which part of the delete_users function should contain the audience_id, and where to insert the payload of the list of hashed email to remove from the audience.

AWS Lambda function for AWS Metric using Autoscaling groups

I am in the midst of coding a lambda function which will create an alarm based upon some disk metrics. The code so far looks like this:
import collections
from datetime import datetime
import calendar
def lambda_handler(event, context):
    """Create (or update) a CloudWatch alarm on disk_used_percent for one instance.

    The instance id arrives via the event, and the owning Auto Scaling group
    is looked up dynamically, so the same definition works for every instance.
    """
    import boto3  # BUG FIX: the original snippet used boto3 without importing it

    # NOTE(review): adjust the key to however your trigger passes the instance id.
    instance_id = event['instance_id']

    # Resolve the ASG that owns this instance -- the boto3 equivalent of the
    # `aws autoscaling describe-auto-scaling-instances` CLI query.
    asg_client = boto3.client('autoscaling')
    asg_info = asg_client.describe_auto_scaling_instances(InstanceIds=[instance_id])
    asg_name = ''
    if asg_info['AutoScalingInstances']:
        asg_name = asg_info['AutoScalingInstances'][0]['AutoScalingGroupName']

    client = boto3.client('cloudwatch')
    alarm = client.put_metric_alarm(
        AlarmName='Disk Monitor',
        MetricName='disk_used_percent',
        Namespace='CWAgent',
        Statistic='Maximum',
        ComparisonOperator='GreaterThanOrEqualToThreshold',
        Threshold=60.0,
        Period=10,
        EvaluationPeriods=3,
        Dimensions=[
            {
                'Name': 'InstanceId',
                'Value': instance_id
            },
            {
                # BUG FIX: the original reused '{instance_id}' here; this
                # dimension's value must be the Auto Scaling group name.
                'Name': 'AutoScalingGroupName',
                'Value': asg_name
            },
            {
                'Name': 'fstype',
                'Value': 'xfs'
            },
            {
                'Name': 'path',
                'Value': '/'
            }
        ],
        Unit='Percent',
        ActionsEnabled=True)
As seen, {instance_id} is a variable because the idea is that this will be used for every instance. However, I am wondering how I would code the same for AutoScalingGroupName because I require this to be a variable also. I know that the below pulls out the AutoScalingGroupName for me, but how would I add that to the above block in terms of syntax, is my problem:
aws autoscaling describe-auto-scaling-instances --output text --query "AutoScalingInstances[?InstanceId == '<instance_dets>'].{AutoScalingGroupName:AutoScalingGroupName}"
For example, would I add a block beginning as below:
def lambda_handler(event, context):
client = boto3.client('autoscaling')
And if so, how would I then code what is needed in terms of syntax to get the 'Value': '{AutoScalingGroupName}' by which I mean a variable to hold the ASG?
describe_auto_scaling_instances takes InstanceIds as a parameter. So if you know your instance_id you can find its asg as follows:
# Look up the Auto Scaling group that owns the given instance id.
asg_client = boto3.client('autoscaling')
result = asg_client.describe_auto_scaling_instances(
    InstanceIds=[instance_id])
# An empty 'AutoScalingInstances' list means the instance is not in any ASG.
matches = result['AutoScalingInstances']
asg_name = matches[0]['AutoScalingGroupName'] if matches else ''
print(asg_name)

How to get json representation from search_all_iam_policies() results

I'm working to implement the search_all_iam_policies() method in google-cloud-asset as follows:
from google.cloud import asset_v1
# NOTE(review): this is clearly an excerpt from a function body -- the bare
# `return` below is only valid inside a function, and `json` must be imported
# for json.dumps to resolve.
ASSET_CLIENT = asset_v1.AssetServiceClient()
response = ASSET_CLIENT.search_all_iam_policies(
scope='projects/my_project',
query='my.email#domain.com'
)
policies = []
# Iterating the pager yields IamPolicySearchResult protos, which json.dumps
# cannot serialize directly -- the root cause of the question.
for policy in response:
policies.append(policy)
return json.dumps({
'policies': policies
})
But I cannot find a way to get a JSON representation of the policies, nor of a single policy. In this case 'response' is a google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager and each 'policy' is a google.cloud.asset_v1.types.assets.IamPolicySearchResult. I can print them to the console but need them in JSON format to send to another system.
Just to expand on Michael's answer.
When using that approach you "lose" some information namely the resource, project, asset_type and organization.
import json  # BUG FIX: needed for json.loads below but missing from the snippet
from google.cloud import asset_v1
from google.protobuf.json_format import MessageToJson

ASSET_CLIENT = asset_v1.AssetServiceClient()
response = ASSET_CLIENT.search_all_iam_policies(
    scope='projects/my_project',
    query='my.email#domain.com'  # This field is optional
)
policies = []
for policy in response:
    policies.append(
        {
            # Keep the pager-level fields that a bare MessageToJson(policy.policy)
            # would drop: resource, project, asset_type and organization.
            "resource": f"{policy.resource}",
            "project": f"{policy.project}",
            # Round-trip the policy proto through JSON to get plain dicts/lists.
            "bindings": json.loads(MessageToJson(policy.policy)).get('bindings'),
            "asset_type": f"{policy.asset_type}",
            "organization": f"{policy.organization}"
        }
    )
This will give you a list of dicts that look like the following:
{
'resource': '//some_resource',
'project': 'some_project',
'bindings': [
{
'role': 'some_role',
'members': [
'projectEditor:some_project',
'projectOwner:some_project'
]
},
{
'role': 'some_other_role',
'members': [
'projectViewer:some_project'
]
},
],
'asset_type': 'some_asset_type',
'organization': 'some_organization'
}
Found a way to decode the message like this:
from google.cloud import asset_v1
from google.protobuf.json_format import MessageToDict
# NOTE(review): excerpt of a function body -- the bare `return` below is only
# valid inside a function, and `json` must be imported for json.dumps.
ASSET_CLIENT = asset_v1.AssetServiceClient()
response = ASSET_CLIENT.search_all_iam_policies(
scope='projects/my_project',
query='my.email#domain.com'
)
policies = []
# MessageToDict converts each result's policy proto into plain dicts/lists
# that json.dumps can serialize.
for policy in response:
policies.append(MessageToDict(policy.policy))
return json.dumps({
'policies': policies
})

How to integrate a python script and execute it on Django

what I want to happen is:
under some_list_of_contacts = [] will be the numbers from my django model.
i will create a html template for this script that when I click the button it will execute this send_sms.py.
under SMS_MESSAGE it will be the latest data coming from the Django model with a timestamp, for example (As of {'timestamp'} the level is {'level'}).
I'm a beginner on Django and Python, please how can I do these? Thanks!
send_sms.py
import boto3
# SECURITY NOTE(review): never hard-code AWS credentials in source code; use an
# IAM role, environment variables, or the shared credentials file instead.
AWS_ACCESS_KEY_ID = "<>"
AWS_SECRET_ACCESS_KEY = "<>"
AWS_REGION_NAME = "eu-west-1"
SENDER_ID = "Test"
SMS_MESSAGE = "Test"
# SNS client bound to the region above.
client = boto3.client(
"sns",
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION_NAME
)
# create_topic is idempotent: it returns the existing topic if one with this
# name already exists.
topic = client.create_topic(Name="notifications")
topic_arn = topic['TopicArn']
# TODO: replace with phone numbers pulled from the Django model (E.164 format).
some_list_of_contacts = [
'(must be numbers from django model)',
]
# Subscribe every contact to the topic over SMS.
for number in some_list_of_contacts:
client.subscribe(
TopicArn=topic_arn,
Protocol='sms',
Endpoint=number
)
# A single publish fans out to all SMS subscribers of the topic.
response = client.publish(
Message=SMS_MESSAGE,
TopicArn=topic_arn,
MessageAttributes={
# NOTE(review): the 'string' entry looks like placeholder boilerplate copied
# from the docs; only AWS.SNS.SMS.SenderID is meaningful here.
'string': {
'DataType': 'String',
'StringValue': 'String',
},
'AWS.SNS.SMS.SenderID': {
'DataType': 'String',
'StringValue': SENDER_ID
}
}
)
print(response)
print("MessageId:" + response["MessageId"])
print("HTTPStatusCode:" + str(response["ResponseMetadata"]["HTTPStatusCode"]))

adaccount/reportstats is deprecated for versions v2.4 and higher

I'm trying to follow some examples from Python Facebook Marketing Api but, when I run:
# NOTE(review): get_report_stats targets the pre-v2.4 reportstats endpoint that
# Facebook removed -- hence the (#12) deprecation error quoted below. The
# Insights edge (account.get_insights) is its replacement.
i_async_job = account.get_insights(params={'level': 'adgroup'}, async=True)
# NOTE(review): `async` became a reserved keyword in Python 3.7, so these
# `async=True` keyword arguments are a SyntaxError on modern Python; newer SDK
# versions renamed the parameter (is_async).
r_async_job = account.get_report_stats(
params={
'data_columns': ['adgroup_id'],
'date_preset': 'last_30_days'
},
async=True
)
I'm getting
Status: 400
Response:
{
"error": {
"message": "(#12) adaccount/reportstats is deprecated for versions v2.4 and higher",
"code": 12,
"type": "OAuthException"
}
}
Even on Facebook's own site I only found this page, and it contains only curl examples.
Is there a working example on how to get data from Insights edge with the Python Ads API?
Here is a full example of how to export some insights asynchronously from the new Insights endpoints:
from facebookads import test_config as config
from facebookads.objects import *
import time

# Fill in your numeric ad account id. (The original `<YOUR_ACCOUNT_ID>`
# placeholder was not valid Python; a string placeholder keeps the template
# syntactically correct.)
account_id = '<YOUR_ACCOUNT_ID>'
account_id = 'act_' + str(account_id)

fields = [
    Insights.Field.impressions,
    Insights.Field.clicks,
    Insights.Field.actions,
    Insights.Field.spend,
    Insights.Field.campaign_group_name,
]
params = {
    'date_preset': Insights.Preset.last_7_days,
    'level': Insights.Level.adgroup,
    'sort_by': 'date_start',
    'sort_dir': 'desc',
}
ad_account = AdAccount(account_id)
# BUG FIX: `async` is a reserved keyword since Python 3.7, so `async=True`
# is now a SyntaxError; pass the SDK's parameter through **kwargs instead.
job = ad_account.get_insights(fields=fields, params=params, **{'async': True})

# Poll once a second until the asynchronous report job reports 100% done.
insights = None
while insights is None:
    time.sleep(1)
    job.remote_read()
    completion = job[AsyncJob.Field.async_percent_completion]
    print("Percent done: " + str(completion))
    # BUG FIX: `is` tests object identity and only happened to work for small
    # cached ints; numeric comparison must use ==.
    if int(completion) == 100:
        insights = job.get_result(params={'limit': 100})
for ad_insight in insights:
    print(ad_insight)

Categories