How to integrate a Python script and execute it in Django - python

What I want to happen is:
Under some_list_of_contacts = [] will be the numbers from my Django model.
I will create an HTML template for this script so that when I click a button it executes send_sms.py.
Under SMS_MESSAGE will be the latest data coming from the Django model with a timestamp, for example: As of {timestamp} the level is {level}.
I'm a beginner with Django and Python. How can I do this? Thanks!
send_sms.py
import boto3

# AWS credentials and region (placeholders)
AWS_ACCESS_KEY_ID = "<>"
AWS_SECRET_ACCESS_KEY = "<>"
AWS_REGION_NAME = "eu-west-1"

SENDER_ID = "Test"
SMS_MESSAGE = "Test"

client = boto3.client(
    "sns",
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    region_name=AWS_REGION_NAME
)

# Create (or reuse) the topic and subscribe each contact number to it
topic = client.create_topic(Name="notifications")
topic_arn = topic['TopicArn']

some_list_of_contacts = [
    '(must be numbers from django model)',
]
for number in some_list_of_contacts:
    client.subscribe(
        TopicArn=topic_arn,
        Protocol='sms',
        Endpoint=number
    )

# Publish the SMS to every subscriber of the topic
response = client.publish(
    Message=SMS_MESSAGE,
    TopicArn=topic_arn,
    MessageAttributes={
        'AWS.SNS.SMS.SenderID': {
            'DataType': 'String',
            'StringValue': SENDER_ID
        }
    }
)
print(response)
print("MessageId:" + response["MessageId"])
print("HTTPStatusCode:" + str(response["ResponseMetadata"]["HTTPStatusCode"]))

Related

Getting correct value from field

I am having an issue with Elasticsearch.
I can finally post to Elasticsearch, but the data is being written with an 'S' key instead of the actual value.
When I check my CloudWatch logs I see this:
{
  "Records": [
    {
      "eventID": "d5d4955d706dd71348760a482f33735f",
      "eventName": "INSERT",
      "eventVersion": "1.1",
      "eventSource": "aws:dynamodb",
      "awsRegion": "us-east-1",
      "dynamodb": {
        "ApproximateCreationDateTime": 1613816980.0,
        "Keys": {
          "uuid": { "S": "c140a68de65301465cd1cd3d97cc4107" }
        },
        "NewImage": {
          "SmsStatus": { "S": "received" },
          "streetname": { "S": "King tut" },
          "timestampMessage": { "S": "Sat Feb 20 2021 10:29:39 GMT+0000 (Coordinated Universal Time)" }
        }
      }
    }
  ]
}
This is the code I am using:
import os
import boto3
import requests
from requests_aws4auth import AWS4Auth

es_host = os.environ['ES_HOST']
es_index = "metadata"
es_type = "episodes"
url = es_host + '/' + es_index + '/' + es_type + '/'
region = 'us-east-1'
service = 'es'

credentials = boto3.Session().get_credentials()
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token=credentials.token)

def lambda_handler(event, context):
    print(event)
    for record in event['Records']:
        id = str(record['dynamodb']['Keys']['uuid']['S'])
        if record['eventName'] == 'REMOVE':
            res = requests.delete(url + id, auth=awsauth)
        else:
            document = record['dynamodb']['NewImage']
            res = requests.put(url + id, auth=awsauth, json=document, headers={"Content-Type": "application/json"})
I'm sure this is something small to fix, but I'm not certain how. If someone could assist, thanks.
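Those 'S' keys are DynamoDB's attribute-value encoding, which the stream delivers verbatim, so the document needs to be unmarshalled into plain values before indexing. A minimal sketch of one common fix, using boto3's TypeDeserializer (not part of the original post):

from boto3.dynamodb.types import TypeDeserializer

deserializer = TypeDeserializer()

def unmarshal(dynamodb_json):
    # Turn DynamoDB attribute-value maps like {'S': 'received'} into plain Python values
    return {k: deserializer.deserialize(v) for k, v in dynamodb_json.items()}

With that in place, the handler would index unmarshal(record['dynamodb']['NewImage']) instead of the raw NewImage.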

How to get a JSON representation of search_all_iam_policies() results

I'm working to implement the search_all_iam_policies() method in google-cloud-asset as follows:
import json

from google.cloud import asset_v1

ASSET_CLIENT = asset_v1.AssetServiceClient()
response = ASSET_CLIENT.search_all_iam_policies(
    scope='projects/my_project',
    query='my.email@domain.com'
)
policies = []
for policy in response:
    policies.append(policy)
return json.dumps({
    'policies': policies
})
But I cannot find a way to get a JSON representation of policies nor of a single policy. In this case response is a google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager and each policy is a google.cloud.asset_v1.types.assets.IamPolicySearchResult. I can print them to the console but need them in JSON format to send to another system.
Just to expand on Michael's answer.
When using that approach you "lose" some information, namely the resource, project, asset_type and organization.
import json

from google.cloud import asset_v1
from google.protobuf.json_format import MessageToJson

ASSET_CLIENT = asset_v1.AssetServiceClient()
response = ASSET_CLIENT.search_all_iam_policies(
    scope='projects/my_project',
    query='my.email@domain.com'  # This field is optional
)
policies = []
for policy in response:
    policies.append(
        {
            "resource": f"{policy.resource}",
            "project": f"{policy.project}",
            "bindings": json.loads(MessageToJson(policy.policy)).get('bindings'),
            "asset_type": f"{policy.asset_type}",
            "organization": f"{policy.organization}"
        }
    )
This will give you a list of dicts that look like the following:
{
    'resource': '//some_resource',
    'project': 'some_project',
    'bindings': [
        {
            'role': 'some_role',
            'members': [
                'projectEditor:some_project',
                'projectOwner:some_project'
            ]
        },
        {
            'role': 'some_other_role',
            'members': [
                'projectViewer:some_project'
            ]
        },
    ],
    'asset_type': 'some_asset_type',
    'organization': 'some_organization'
}
Found a way to decode the message like this:
import json

from google.cloud import asset_v1
from google.protobuf.json_format import MessageToDict

ASSET_CLIENT = asset_v1.AssetServiceClient()
response = ASSET_CLIENT.search_all_iam_policies(
    scope='projects/my_project',
    query='my.email@domain.com'
)
policies = []
for policy in response:
    policies.append(MessageToDict(policy.policy))
return json.dumps({
    'policies': policies
})
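If the whole search result is wanted in one go, here is a sketch under the assumption that the installed google-cloud-asset is proto-plus based, where message types expose a to_dict class method (verify against your installed version):

from google.cloud import asset_v1

# Serialize each full IamPolicySearchResult, keeping resource, project,
# asset_type and organization alongside the policy bindings.
results = [
    asset_v1.types.IamPolicySearchResult.to_dict(policy) for policy in response
]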

Using quicksight boto3 update_data_source

Summary
What is the right syntax for update_data_source in the QuickSight boto3 client to change credentials?
Context
I am trying to use the update data source method for QuickSight in boto3 to update my Redshift credentials in QuickSight.
My issue is that my code is passing a dictionary as a key to another dictionary. How can I restructure that to get to the username / password for Redshift?
Code
My code looks like this:
def main():
    qs = boto3.client('quicksight', region_name=region_name)
    response = qs.update_data_source(
        AwsAccountId='awsaccountid',
        DataSourceId='datasourceid',
        Name='qs_test',
        Credentials={
            {
                'CredentialPair': {
                    'Username': 'test_user'
                    'Password': 'my_pass'
                }
            }
        }
    )
    print(response)

main()
I also tried the below:
response = qs.update_data_source(
    AwsAccountId='awsaccountid',
    DataSourceId='datasourceid',
    Name='qs_test',
    Credentials={CredentialPair
        {
            RedshiftParameters=[
                {
                    'Database': 'dbname',
                    'ClusterId': 'clusterid'
                }
            ]
        }
    },
    Credentials={
        'CredentialPair': {
            'Username': 'test_user',
            'Password': 'my_pass'
        }
    }
)
print(response)
The below syntax works:
def main():
    qs = boto3.client('quicksight', region_name=region_name)
    response = qs.update_data_source(
        AwsAccountId='awsaccountid',
        DataSourceId='datasourceid',
        Name='qs_test',
        DataSourceParameters={
            'RedshiftParameters': {
                'Database': 'dbname',
                'ClusterId': 'clusterid'
            }
        },
        Credentials={
            'CredentialPair': {
                'Username': 'test_user',
                'Password': 'my_pass'
            }
        }
    )
    print(response)

main()

Azure Python SDK: how to deploy a VM as an Azure Spot instance

Using the Azure Python SDK, how do I deploy a VM as an Azure Spot instance?
If you want to create an Azure Spot VM, please refer to the following code. For more details, please refer to the documentation.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.compute.v2019_07_01 import ComputeManagementClient
from azure.mgmt.compute.v2019_07_01.models import VirtualMachinePriorityTypes, VirtualMachineEvictionPolicyTypes, BillingProfile

SUBSCRIPTION_ID = 'subscription-id'
GROUP_NAME = 'myResourceGroup'
LOCATION = 'westus'
VM_NAME = 'myVM'

credentials = ServicePrincipalCredentials(
    client_id='application-id',
    secret='authentication-key',
    tenant='tenant-id'
)
compute_client = ComputeManagementClient(
    credentials,
    SUBSCRIPTION_ID
)

vm_parameters = {
    'location': LOCATION,
    'os_profile': {
        'computer_name': VM_NAME,
        'admin_username': 'azureuser',
        'admin_password': 'Azure12345678'
    },
    'hardware_profile': {
        'vm_size': 'Standard_DS1'
    },
    'storage_profile': {
        'image_reference': {
            'publisher': 'MicrosoftWindowsServer',
            'offer': 'WindowsServer',
            'sku': '2012-R2-Datacenter',
            'version': 'latest'
        }
    },
    'network_profile': {
        'network_interfaces': [{
            'id': nic.id  # an existing network interface created beforehand
        }]
    },
    'priority': VirtualMachinePriorityTypes.spot,  # use an Azure Spot instance
    'eviction_policy': VirtualMachineEvictionPolicyTypes.deallocate,  # for Azure Spot VMs the only supported value is 'Deallocate'
    'billing_profile': BillingProfile(max_price=float(2))
}

creation_result = compute_client.virtual_machines.create_or_update(
    GROUP_NAME,
    VM_NAME,
    vm_parameters
)
print(creation_result.result())
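Note that the snippet above uses nic.id without defining nic. A minimal sketch of obtaining it, assuming an existing network interface named 'myNic' in the same resource group and the matching (pre-track-2) azure-mgmt-network package:

from azure.mgmt.network import NetworkManagementClient

network_client = NetworkManagementClient(credentials, SUBSCRIPTION_ID)
# Look up the existing NIC that the VM will attach to ('myNic' is a placeholder name)
nic = network_client.network_interfaces.get(GROUP_NAME, 'myNic')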

SNS email notification based on Lambda CloudWatch logs with custom metrics

I have written a Python script to get instance information over email with a cron setup and to populate metrics as well. With the following code I can see all the logs in the CloudWatch Logs console. However, the dimension never gets created under the CloudWatch metrics section, and no mail is triggered either.
import boto3
import json
import logging
from datetime import datetime

logger = logging.getLogger()
logger.setLevel(logging.INFO)

def post_metric(example_namespace, example_dimension_name, example_metric_name, example_dimension_value, example_metric_value):
    # Publish one data point for the custom metric
    cw_client = boto3.client("cloudwatch")
    response = cw_client.put_metric_data(
        Namespace=example_namespace,
        MetricData=[
            {
                'MetricName': example_metric_name,
                'Dimensions': [
                    {
                        'Name': example_dimension_name,
                        'Value': example_dimension_value
                    },
                ],
                'Timestamp': datetime.now(),
                'Value': int(example_metric_value)
            },
        ]
    )

def lambda_handler(event, context):
    logger.info(event)
    ec2_client = boto3.client("ec2")
    sns_client = boto3.client("sns")
    response = ec2_client.describe_instances(
        Filters=[
            {
                'Name': 'tag:Name',
                'Values': [
                    'jenkins-slave-*'
                ]
            }
        ]
    )['Reservations']
    for reservation in response:
        ec2_instances = reservation["Instances"]
        for instance in ec2_instances:
            myInstanceId = instance['InstanceId']
            myInstanceState = instance['State']['Name']
            myInstance = {
                'InstanceId': myInstanceId,
                'InstanceState': myInstanceState,
            }
            logger.info(json.dumps(myInstance))
            post_metric("Jenkins", "ciname", "orphaned-slaves", myInstanceId, 1)
            # Send message to SNS (testing purposes)
            SNS_TOPIC_ARN = 'arn:aws:sns:us-east-1:1234567890:example-instance-alarms'
            sns_client.publish(
                TopicArn=SNS_TOPIC_ARN,
                Subject='Instance Info: ' + myInstanceId,
                Message='Instance id: ' + myInstanceId
            )
Can anyone please help if I am missing anything here? Thanks in advance.
put_metric_data only publishes the metric; to get a notification you also need a CloudWatch alarm on that metric, which has required fields such as AlarmName, EvaluationPeriods, etc., per the put_metric_alarm documentation.
You can use the following for an example.
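A minimal sketch of such an alarm; the alarm name, statistic, period, threshold and dimension value below are illustrative assumptions, not from the original post:

import boto3

cw_client = boto3.client("cloudwatch")

# Alarm on the custom metric published above; the alarm action notifies the SNS topic.
cw_client.put_metric_alarm(
    AlarmName='orphaned-slaves-alarm',  # hypothetical name
    Namespace='Jenkins',
    MetricName='orphaned-slaves',
    Dimensions=[{'Name': 'ciname', 'Value': 'i-0123456789abcdef0'}],  # must match a published dimension value
    Statistic='Sum',
    Period=300,
    EvaluationPeriods=1,
    Threshold=0,
    ComparisonOperator='GreaterThanThreshold',
    AlarmActions=['arn:aws:sns:us-east-1:1234567890:example-instance-alarms'],
)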
