Hi I am writing a lambda function that will update the DynamoDb using boto3. In this code employee_id is auto-generated but you have to provide last_name or first_name. I am doing it with if-else. If the attribute tends to increase so does the checks. I can't keep on going with if condition. How can I tackle this what changes should I make
import boto3
import json
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Employee')
def lambda_handler(event, context):
employee_id = event['employee_id']
if 'first_name' in event and 'last_name' not in event:
first_name = event['first_name']
UpdateExpression = 'SET first_name = :val1'
ExpressionAttributeValues = {':val1': first_name }
elif 'last_name' in event and 'first_name' not in event:
last_name = event['last_name']
UpdateExpression = 'SET last_name = :val1'
ExpressionAttributeValues = {':val1': last_name}
elif 'first_name' in event and 'last_name' in event:
last_name = event['last_name']
first_name= event['first_name']
UpdateExpression = 'SET last_name = :val1, first_name = :val2'
ExpressionAttributeValues = {
':val1': last_name,
':val2': first_name
}
else:
raise ValueError("first_name and last_name not given")
update = table.update_item(
Key={
'employee_id': employee_id
},
ConditionExpression= 'attribute_exists(employee_id)',
UpdateExpression=UpdateExpression,
ExpressionAttributeValues=ExpressionAttributeValues
)
The code that I came up with but is not working
import boto3
import json
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Employee')
def lambda_handler(event, context):
employee_id = event['employee_id']
last_name= event['last_name']
first_name= event['first_name']
column = [first_name,last_name]
for i in range(0,len(column):
query = 'SET {} = :val1,:val2'.format(column[i])
response = table.update_item(
Key={
'employee_id': employee_id
},
ConditionExpression= 'attribute_exists(employee_id)',
UpdateExpression = query,
ExpressionAttributeValues={
':val1': first_name,
':val2': last_name
},
ReturnValues="UPDATED_NEW"
)
You should look at storing the update expression and expression values separately, then passing the complete set into the Lambda function.
This would also allow you to validate against each parameter (perhaps breaking this into a validate function to avoid excessive size of function). This way you support both required and optional parameters, then at the end validate that the update expression has valid parameters.
Perhaps something like the below?
import boto3
import json
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Employee')
def lambda_handler(event, context):
update_expression_values = []
expression_attribute_values = {}
if 'employee_id' in event:
employee_id = event['employee_id']
else:
raise ValueError("employee_id not given")
if 'first_name' in event:
update_expression_values.append('first_name = :val_first_name')
expression_attribute_values[':val_first_name'] = event['first_name']
if 'last_name' in event:
update_expression_values.append('last_name = :val_last_name')
expression_attribute_values[':val_last_name'] = event['last_name']
if len(update_expression_values) < 1:
raise ValueError("first_name and last_name not given")
seperator = ','
update = table.update_item(
Key={
'employee_id': employee_id
},
ConditionExpression= 'attribute_exists(employee_id)',
UpdateExpression='SET ' + seperator.join(update_expression_values),
ExpressionAttributeValues=expression_attribute_values
)
This could be broken down further to reuse the logic through a function that can perform these checks such as the below.
import boto3
import json
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Employee')
update_expression_values = []
expression_attribute_values = {}
def lambda_handler(event, context):
global update_expression_values
global expression_attribute_values
update_expression_values = []
expression_attribute_values = {}
if 'employee_id' in event:
employee_id = event['employee_id']
else:
raise ValueError("employee_id not given")
process_event_key(event, 'first_name')
process_event_key(event, 'last_name')
process_event_key(event, 'new_value')
if len(update_expression_values) < 1:
raise ValueError("first_name and last_name not given")
seperator = ','
update = table.update_item(
Key={
'employee_id': employee_id
},
ConditionExpression= 'attribute_exists(employee_id)',
UpdateExpression='SET ' + seperator.join(update_expression_values),
ExpressionAttributeValues=expression_attribute_values
)
def process_event_key(event, key):
global update_expression_values
global expression_attribute_values
if key in event:
update_expression_values.append(key + ' = :val_' + key)
expression_attribute_values[':val_' + key] = event[key]
Test Event
{
"new_value": "test",
"employee_id": "value2",
"last_name": "value3",
"first_name": "value4"
}
Related
i am trying to make a function that uses multiple aws services in a lambda function :
import os
import boto3
import hmac, hashlib, base64, copy
def lambda_handler(event, context):
secretsmanager = boto3.client('secretsmanager')
secret_response = secretsmanager.get_secret_value(SecretId = os.environ.get('cognitoSecretID'))
cognito = boto3.client('cognito-idp')
cognito_pool_id = os.environ.get('cognitoPoolID')
event_body = event['body']
username = event_body['email']
secret = secret_response['SecretString']
raw_message = username + cognito_pool_id
message = bytes(raw_message, 'utf-8')
secret = bytes(secret,'utf-8')
secret_hash = base64.b64encode(hmac.new(secret, message, digestmod=hashlib.sha256).digest()).decode()
authParameters = {
'USERNAME': username,
'PASSWORD': event_body['password'],
'SECRET_HASH': secret_hash
}
cognito.admin_initiate_auth(UserPoolId = cognito_pool_id, ClientId = os.environ.get('cognitoClientID'), AuthFlow = "ADMIN_USER_PASSWORD_AUTH", AuthParameters = authParameters )
return True
i am testing git with pytest as follows :
import os
import pytest
from unittest.mock import call
import boto3
import hmac, hashlib, base64
from src.DacAdminsLogin import lambda_handler
from __mocks__.SecretsManagerMock import SecretsManagerMock
MockLambdaHandlerEvent = {
'body' : {
'email': "abdelouahedelhariri#gmail.com",
'password': "test"
}
}
#pytest.fixture
def dummy_secret(monkeypatch):
dummySecret = 'adummytest'
monkeypatch.setenv('cognitoSecretID', dummySecret, prepend=False)
return dummySecret
#pytest.fixture
def dummy_cognito_id(monkeypatch):
dummyCognitoID = 'adummycognitoid'
monkeypatch.setenv('cognitoClientID', dummyCognitoID, prepend=False)
return 'adummycognitoid'
#pytest.fixture
def dummy_cognito_pool_id(monkeypatch):
dummyCognitoPoolID = 'adummycognitopoolid'
monkeypatch.setenv('cognitoPoolID', dummyCognitoPoolID, prepend=False)
return 'adummycognitopoolid'
#pytest.fixture
def secret_manager(mocker, dummy_secret):
mock = mocker.patch('__mocks__.SecretsManagerMock.SecretsManagerMock')
mock.get_secret_value.return_value = {'SecretString' : dummy_secret}
return mock
#pytest.fixture
def cognito(mocker):
return mocker.patch('__mocks__.CognitoMock.CognitoMock', return_value= "Test secret")
#pytest.fixture
def client(mocker, secret_manager, cognito):
return mocker.patch('boto3.client', side_effect= [secret_manager, cognito])
def test_create_secrets_manager_client(client):
lambda_handler(MockLambdaHandlerEvent,1)
client.assert_has_calls([call('secretsmanager')])
def test_get_secret_value_call(client ,secret_manager, dummy_secret):
lambda_handler(MockLambdaHandlerEvent,1)
secret_manager.get_secret_value.assert_called_once_with(SecretId= dummy_secret)
def test_create_cognito_client(client ,secret_manager, dummy_secret):
lambda_handler(MockLambdaHandlerEvent,1)
client.assert_has_calls([call('secretsmanager'), call('cognito')])
def test_admin_initiate_auth_call(client, monkeypatch , cognito, dummy_secret, dummy_cognito_id, dummy_cognito_pool_id):
MockLambdaHandlerEventBody = MockLambdaHandlerEvent['body']
username = MockLambdaHandlerEventBody['email']
app_client_id = dummy_cognito_pool_id
key = dummy_secret
message = bytes(username+app_client_id,'utf-8')
key = bytes(dummy_secret,'utf-8')
secret_hash = base64.b64encode(hmac.new(key, message, digestmod=hashlib.sha256).digest()).decode()
authParameters = {
'USERNAME': username,
'PASSWORD': MockLambdaHandlerEventBody['password'],
'SECRET_HASH': secret_hash
}
lambda_handler(MockLambdaHandlerEvent,1)
cognito.admin_initiate_auth.assert_called_once_with(UserPoolId = dummy_cognito_pool_id, ClientId = dummy_cognito_id, AuthFlow = "ADMIN_USER_PASSWORD_AUTH", AuthParameters = authParameters )
I am new to python and i don't know why i keep getting the following error : TypeError: can only concatenate str (not "NoneType") to str, coming from the raw_message concatenation.
When i comment out the cognito.admin_initiate_auth.assert_called_once_with(UserPoolId = dummy_cognito_pool_id, ClientId = dummy_cognito_id, AuthFlow = "ADMIN_USER_PASSWORD_AUTH", AuthParameters = authParameters )
or change for example the UserPoolId to another value the error disappears.
In my case i inherited the crm form and add one mamy2many field this name [Estimation Assign to] when i select the users from this field and save the record that time selected users are added in Add Followers and also send the mail. Now problem is that when i change the kanban card from one stage to another stage that time if in form estimation field assigned some users the mail are also send those users.
but i want to send mail only when i'm open the record and select the users from estimation field and then click on save button only that i want to sand mail. not when i change the kanban card from one stage to another stage.
if you know it please let me know.
#api.model
def create(self, vals):
lead_res = super(CrmLead, self).create(vals)
for rec in lead_res:
if rec.estimation_id:
partner_ids = []
for est_rec in rec.estimation_id:
if est_rec.partner_id and est_rec.partner_id.email:
user_name = self.env.user.name_get()[0][1]
partner_ids.append(est_rec.partner_id.id)
template_obj = self.env['mail.mail']
template_data = {
'subject': 'New Estimation Asign : ',
'body_html': "Hello,</br><h5>" + user_name + " invited you to follow Lead/Opportunity document : " + rec.name + "</h5>",
'email_from': self.env['mail.message']._get_default_from(),
'email_to': est_rec.partner_id.email
}
template_id = template_obj.create(template_data)
template_obj.send(template_id)
if partner_ids:
rec.message_subscribe(partner_ids, None)
return lead_res
#api.multi
def write(self, vals):
res = super(CrmLead, self).write(vals)
for rec in self:
if rec.estimation_id:
partner_ids = []
for est_rec in rec.estimation_id:
if est_rec.partner_id and est_rec.partner_id.email:
user_name = self.env.user.name_get()[0][1]
partner_ids.append(est_rec.partner_id.id)
template_obj = self.env['mail.mail']
template_data = {
'subject': 'New Estimation Asign : ',
'body_html': "Hello,</br><h5>" + user_name + " invited you to follow Lead/Opportunity document : " + rec.name + "</h5>",
'email_from': self.env['mail.message']._get_default_from(),
'email_to': est_rec.partner_id.email
}
template_id = template_obj.create(template_data)
print('===================To sent ==================', est_rec.partner_id.email)
template_obj.send(template_id)
rec.message_subscribe(partner_ids, None)
#message_unsubscribe
message_partner_ids = rec.message_partner_ids.ids
est_ids = [est_rec.partner_id.id for est_rec in rec.estimation_id] + [self.env.ref('base.partner_root').id]
unsub_partners = set(message_partner_ids) - set(est_ids)
if list(unsub_partners):
rec.message_unsubscribe(list(unsub_partners))
else:
print("+++++============= Else Part =============+++++")
return res
Try to add another condition to send mails when estimation_id has changed.
if u'estimation_id' in vals and rec.estimation_id:
EDIT
The following code will compute the new added users:
user_ids = {rec.id: [user_id.id for user_id in rec.estimation_id] for rec in self}
res = super(CrmLead, self).write(vals)
for rec in self:
new_user_ids = [user.id for user in rec.estimation_id if user.id not in user_ids[rec.id]]
For Custom Attributes, Google don't provide an example of use in their example code.
Google docs with missing code in its Python example.
Google's example for creating a job and notes "Create Job with Custom Attributes" but doesn't actually include any code for Custom Attrinbutes:
def sample_create_job(project_id, tenant_id, company_name, requisition_id,
language_code):
"""Create Job with Custom Attributes"""
client = talent_v4beta1.JobServiceClient()
# project_id = 'Your Google Cloud Project ID'
# tenant_id = 'Your Tenant ID (using tenancy is optional)'
# company_name = 'Company name, e.g. projects/your-project/companies/company-id'
# requisition_id = 'Job requisition ID, aka Posting ID. Unique per job.'
# language_code = 'en-US'
if isinstance(project_id, six.binary_type):
project_id = project_id.decode('utf-8')
if isinstance(tenant_id, six.binary_type):
tenant_id = tenant_id.decode('utf-8')
if isinstance(company_name, six.binary_type):
company_name = company_name.decode('utf-8')
if isinstance(requisition_id, six.binary_type):
requisition_id = requisition_id.decode('utf-8')
if isinstance(language_code, six.binary_type):
language_code = language_code.decode('utf-8')
parent = client.tenant_path(project_id, tenant_id)
job = {
'company': company_name,
'requisition_id': requisition_id,
'language_code': language_code
}
response = client.create_job(parent, job)
print('Created job: {}'.format(response.name))
How do I define Custom Attributes for a job?
Something like the following worked for an earlier version of Talent Solution:
job['custom_attributes'] = {
'custom_name' : {'stringValues' : ['s0', 's1', 's2']},
...
}
I've now tried this:
from google.cloud.talent_v4beta1.types import CustomAttribute
job['custom_attributes'] = [
{
'key' : 'keyname',
'value': CustomAttribute(string_values=[valuestring], filterable=True)
}
]
But when I try to create or update a job an exception is thrown: TypeError: {'key': 'keyname', 'value': string_values: "valuestring"
filterable: true
} has type dict, but expected one of: bytes, unicode
In Nov 2020, Google doc official notes how to do just that
from google.cloud import talent
import six
def create_job(project_id, tenant_id, company_id, requisition_id):
"""Create Job with Custom Attributes"""
client = talent.JobServiceClient()
# project_id = 'Your Google Cloud Project ID'
# tenant_id = 'Your Tenant ID (using tenancy is optional)'
# company_id = 'Company name, e.g. projects/your-project/companies/company-id'
# requisition_id = 'Job requisition ID, aka Posting ID. Unique per job.'
# language_code = 'en-US'
if isinstance(project_id, six.binary_type):
project_id = project_id.decode("utf-8")
if isinstance(tenant_id, six.binary_type):
tenant_id = tenant_id.decode("utf-8")
if isinstance(company_id, six.binary_type):
company_id = company_id.decode("utf-8")
# Custom attribute can be string or numeric value,
# and can be filtered in search queries.
# https://cloud.google.com/talent-solution/job-search/docs/custom-attributes
custom_attribute = talent.CustomAttribute()
custom_attribute.filterable = True
custom_attribute.string_values.append("Intern")
custom_attribute.string_values.append("Apprenticeship")
parent = f"projects/{project_id}/tenants/{tenant_id}"
job = talent.Job(
company=company_id,
title="Software Engineer",
requisition_id=requisition_id,
description="This is a description of this job",
language_code="en-us",
custom_attributes={"FOR_STUDENTS": custom_attribute}
)
response = client.create_job(parent=parent, job=job)
print(f"Created job: {response.name}")
return response.name
I also have a WIP python library to help it is based on pydantic object.
j = Job(
company=company.name,
requisition_id=uuid4().hex,
title="engineer",
description="implement system",
custom_attributes={
"tags": CustomAttributes(
string_values=["hello"],
filterable=True,
keyword_searchable=True
)
}
)
j.create(tenant=tenant)
from google.cloud.talent_v4beta1.types import CustomAttribute
job = {
'title' : ...
}
job['custom_attributes'] = {
'keyname0' : CustomAttributes(
string_values=['eg0', 'eg1'],
filterable=False),
'keyname1' : ...
}
I am developing REST APIs with Flask. One of the tables is modeled as follows:
class AudioSessionModel(db.Model):
__tablename__ = 'audio_session'
id = db.Column('audio_session_id', db.Integer, primary_key = True)
cs_id = db.Column(db.Integer)
session_id = db.Column(db.Integer)
facility = db.Column(db.Integer)
description = db.Column(db.String(400))
def __init__(self, cs_id, session_id, facility):
self.cs_id = cs_id
self.session_id = session_id
self.facility = facility
Business logics are defined in a DAO class:
class AudioSessionDAO(object):
def update(self, data):
audio = AudioSessionModel.query.filter(cs_id == data['CSID'], session_id == data['Session'])
audio.description = data['Desc']
db.session.commit()
return audio
This upate function is called in my endpoint for PUT request:
#api.route('/OperatorAccessment')
class OperatorAssessment(Resource):
#api.expect(assessment)
def put(self):
as_dao = AudioSessionDAO()
as_dao.update(request.json)
The model assessment looks like this:
assessment = api.model('Operator Assessment', {
'CSID': fields.Integer(required=True, description='Central Station ID'),
'Session': fields.Integer(required=True, description='Session ID'),
'Desc': fields.String(description='Description')
})
When I test the PUT request with the following json in request body:
{
"CSID": 1,
"Session": 1,
"Desc": "Siren"
}
I got the following error:
File "C:\Users\xxx_app\model\dao.py", line 63, in update
audio = AudioSessionModel.query.filter(cs_id == data['CSID'], session_id == data['Session'])
NameError: name 'cs_id' is not defined
Apparently, cs_id is defined. Why am I still getting this error?
You have to use the attributes of the class, i.e.
AudioSessionModel.query.filter(
AudioSessionModel.cs_id == data['CSID'],
AudioSessionModel.session_id == data['Session'])
Or filter_by with keyword arguments using just =:
AudioSessionModel.query.filter_by(
cs_id=data['CSID'],
session_id=data['Session'])
See What's the difference between filter and filter_by in SQLAlchemy?
I am a novice at Python but am learning as I go. I found this script and it works well but I wanted to make some edits to it so that it also saves the name of the instance that it created a snapshot for.
import boto3
import collections
import datetime
#allows Python developers to write software that makes use of Amazon services like S3 and EC2
ec = boto3.client('ec2')
#finds tags-keys with the name "backup" or "Backup"
def lambda_handler(event, context):
reservations = ec.describe_instances(
Filters=[
{'Name': 'tag-key', 'Values': ['backup', 'Backup']},
]
).get(
'Reservations', []
)
instances = [
i for r in reservations
for i in r['Instances']
]
print "Found %d instances that need backing up" % len(instances)
to_tag = collections.defaultdict(list)
#find tag-keys with the name Retention default value if NULL is 7 days
for instance in instances:
try:
retention_days = [
int(t.get('Value')) for t in instance['Tags']
if t['Key'] == 'Retention'][0]
except IndexError:
retention_days = 7
for dev in instance['BlockDeviceMappings']:
if dev.get('Ebs', None) is None:
continue
vol_id = dev['Ebs']['VolumeId']
print "Found EBS volume %s on instance %s" % (
vol_id, instance['InstanceId'])
snap = ec.create_snapshot(
VolumeId=vol_id,
)
to_tag[retention_days].append(snap['SnapshotId'])
print "Retaining snapshot %s of volume %s from instance %s for %d days" % (
snap['SnapshotId'],
vol_id,
instance['InstanceId'],
retention_days,
)
#set retention days according to the value int input
for retention_days in to_tag.keys():
delete_date = datetime.date.today() + datetime.timedelta(days=retention_days)
delete_fmt = delete_date.strftime('%Y-%m-%d')
print "Will delete %d snapshots on %s" % (len(to_tag[retention_days]), delete_fmt)
ec.create_tags(
Resources=to_tag[retention_days],
Tags=[
{'Key': 'DeleteOn', 'Value': delete_fmt},
]
)
So far I have this but am a little lost as to how to make it work in with the current script above:
snapshot = ec(to_tag['SnapshotId'])
volumename = ''
# Add volume name to snapshot for easier identification
snapshot.create_tags(Tags=[{'Key': 'Name', 'Value': volumename}])
Any ideas welcomed! Thanks.
import boto3
import collections
import datetime
#allows Python developers to write software that makes use of Amazon services like S3 and EC2
ec = boto3.client('ec2')
sns_client = boto3.client('sns')
#finds tags-keys with the name "backup" or "Backup"
def lambda_handler(event, context):
reservations = ec.describe_instances(
Filters=[
{'Name': 'tag-key', 'Values': ['backup', 'Backup']},
]
).get(
'Reservations', []
)
instances = [
i for r in reservations
for i in r['Instances']
]
print "Found %d instances that need backing up" % len(instances)
to_tag = collections.defaultdict(list)
#find tag-keys with the name Retention default value if NULL is 7 days
for instance in instances:
try:
retention_days = [
int(t.get('Value')) for t in instance['Tags']
if t['Key'] == 'Retention'][0]
except IndexError:
retention_days = 7
for dev in instance['BlockDeviceMappings']:
if dev.get('Ebs', None) is None:
continue
vol_id = dev['Ebs']['VolumeId']
print "Found EBS volume %s on instance %s" % (
vol_id, instance['InstanceId'])
volumes = ec2_client.describe_volumes()
volumes = volumes["Volumes"]
volumes_list = []
for volume in volumes:
volumes_list.append([volume["Tags"][0]["Value"], volume["VolumeId"]])
for volume in volumes_list:
try:
create_snapshot_response = ec2_client.create_snapshot(
VolumeId=volume[1],
Description=volume[0] + " " + str(datetime.now()).split(" ")[0],
)
snapshot_id = create_snapshot_response["SnapshotId"]
tags = ec2_client.create_tags(
Resources=[snapshot_id],
Tags=[{
"Key": "Name",
"Value": "{}: {}".format(volume[0], str(datetime.now()).split(" ")[0])
}]
)
to_tag[retention_days].append(snap['SnapshotId'])
print "Retaining snapshot %s of volume %s from instance %s for %d days" % (
snap['SnapshotId'],
vol_id,
instance['InstanceId'],
retention_days,
)
#set retention days according to the value int input
for retention_days in to_tag.keys():
delete_date = datetime.date.today() + datetime.timedelta(days=retention_days)
delete_fmt = delete_date.strftime('%Y-%m-%d')
print "Will delete %d snapshots on %s" % (len(to_tag[retention_days]), delete_fmt)
ec.create_tags(
Resources=to_tag[retention_days],
Tags=[
{'Key': 'DeleteOn', 'Value': delete_fmt},
]
)
import boto3
ec2_client = boto3.client('ec2')
def lambda_handler(event, context):
instances = ec2_client.describe_instances()['Reservations']
for i in instances:
try:
create_snapshot_response = ec2_client.create_snapshot(
VolumeId=i['Instances'][0]['BlockDeviceMappings'][0]["Ebs"]["VolumeId"]
)
snapshot_id = create_snapshot_response["SnapshotId"]
tags = ec2_client.create_tags(
Resources=[snapshot_id],
Tags=[{
"Key": "Name",
"Value": "{}".format(i['Instances'][0]["Tags"][0]['Value'])
}]
)
except Exception as e:
print(e)
return "Success"
Krishna did what I have in my lambda for providing instance names on my snapshots....one change i had was
instance_name = ""
if 'Tags' in instance:
for tag in instance['Tags']:
if tag['Key'] == 'Name':
instance_name = tag['Value']
if not instance_name:
instance_name = instance['InstanceId']
snap = ec.create_snapshot(
VolumeId=vol_id,
TagSpecifications=[{
'ResourceType': 'snapshot',
'Tags': [{
'Key': 'Name',
'Value': instance_name
}]
}]
)
Update lines 43-45 to following:
instance_name = ""
if 'Tags' in instance:
for tag in instance['Tags']:
if tag['Key'] == 'Name':
instance_name = tag['Value']
if not instance_name:
instance_name = instance['InstanceId']
snap = ec.create_snapshot(
VolumeId=vol_id,
TagSpecifications=[{
'ResourceType': 'snapshot',
'Tags': [{
'Key': 'Name',
'Value': instance['InstanceId']
}]
}]
)