I'm sending emails to verified identities in AWS SES from AWS Lambda without problems.
Now I'm trying to list the verified identities and getting no output.
Here is my code:
import boto3
from botocore.exceptions import ClientError

def list_identities():
    ses = boto3.client('ses')
    response = ses.list_identities(
        IdentityType='EmailAddress',
        MaxItems=10
    )

def lambda_handler(event, context):
    # TODO implement
    print("Listing EMAILS:")
    list_identities()
In the function log I see Listing EMAILS: printed and nothing else.
The Lambda function is invoked in the same region as AWS SES.
You don't return anything from your function.
Try this:
import boto3

def list_identities():
    ses = boto3.client('ses')
    response = ses.list_identities(
        IdentityType='EmailAddress',
        MaxItems=10
    )
    return response

def lambda_handler(event, context):
    # TODO implement
    print("Listing EMAILS:")
    print(list_identities())
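Side note: if you only need the addresses themselves, the response from list_identities contains an Identities list (that key is part of the SES ListIdentities response), so you could also do:

def lambda_handler(event, context):
    print("Listing EMAILS:")
    for identity in list_identities()['Identities']:
        print(identity)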
I'm using the following method to auto-refresh AWS tokens using an IAM role and boto3 (based on these resources):
import boto3
from botocore.credentials import RefreshableCredentials
from botocore.session import get_session

def get_aws_credentials():
    aws_role_arn = 'AWS_ROLE_ARN'
    sts_client = boto3.client('sts')
    assumed_role_object = sts_client.assume_role(
        RoleArn=aws_role_arn,
        RoleSessionName="SessionName",
        DurationSeconds=900
    )
    return {
        'access_key': assumed_role_object['Credentials']['AccessKeyId'],
        'secret_key': assumed_role_object['Credentials']['SecretAccessKey'],
        'token': assumed_role_object['Credentials']['SessionToken'],
        'expiry_time': assumed_role_object['Credentials']['Expiration'].isoformat()
    }

session_credentials = RefreshableCredentials.create_from_metadata(
    metadata=get_aws_credentials(),
    refresh_using=get_aws_credentials,
    method='sts-assume-role'
)

session = get_session()
session._credentials = session_credentials
autorefresh_session = boto3.Session(botocore_session=session)
This is fine for managing AWS resources such as an S3 bucket, for which I'm using:
autorefresh_session.resource('s3')
And RefreshableCredentials handles refreshing the credentials for me.
However, I also need to make requests like this one:
import requests

r = requests.post(
    "https://myawsgatewayurl.com/endpoint",
    json=json_message,
    auth=aws_auth
)
Where I would build aws_auth with:
from aws_requests_auth.aws_auth import AWSRequestsAuth

generated_credentials = get_aws_credentials()
aws_auth = AWSRequestsAuth(
    aws_access_key=generated_credentials['access_key'],
    aws_secret_access_key=generated_credentials['secret_key'],
    aws_token=generated_credentials['token'],
    aws_host=my_aws_host,
    aws_region=my_aws_region,
    aws_service=my_aws_service
)
However, this approach does not seem efficient to me, because I would have to call get_aws_credentials() on every request (or check expiration first) to be sure the credentials have not expired, while I would like to rely on RefreshableCredentials for this.
Is there a better approach?
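One direction that might help (a sketch based on the code above, not a tested answer): the refreshable credentials are reachable through the boto3 session itself, so instead of calling get_aws_credentials() again you can ask the session for its current credentials right before building the auth object; get_frozen_credentials() refreshes them if they are close to expiry:

from aws_requests_auth.aws_auth import AWSRequestsAuth

# Pull the current (auto-refreshed) credentials from the refreshable session
frozen = autorefresh_session.get_credentials().get_frozen_credentials()
aws_auth = AWSRequestsAuth(
    aws_access_key=frozen.access_key,
    aws_secret_access_key=frozen.secret_key,
    aws_token=frozen.token,
    aws_host=my_aws_host,
    aws_region=my_aws_region,
    aws_service=my_aws_service
)

Here my_aws_host, my_aws_region and my_aws_service are the same placeholders as in the snippet above.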
When I try to generate a presigned URL for an object in an AWS S3 bucket using boto3, with this code:
import logging

import boto3
from botocore.exceptions import ClientError
from fastapi import FastAPI

s3 = boto3.client("s3",
                  aws_access_key_id="...",
                  aws_secret_access_key="...")
BUCKET_NAME = "tayibat-files"

app = FastAPI()

@app.get('/{file_name}')
async def method_name(file_name: str):
    try:
        url = s3.generate_presigned_url(
            'get_object',
            Params={'Bucket': BUCKET_NAME,
                    'Key': f"products/{file_name}"},
            ExpiresIn=3600
        )
    except ClientError as e:
        logging.error(e)
    return url
the GET request returns a URL, but when I try to open it in a browser, it generates:
This XML file does not appear to have any style information associated with it. The document tree is shown below.
<Error>
  <Code>InvalidRequest</Code>
  <Message>The authorization mechanism you have provided is not supported. Please use AWS4-HMAC-SHA256.</Message>
  <RequestId>ZW269CV1TAYC7CWC</RequestId>
  <HostId>1yozjolBbu4difnOjjopLeOk79i34WDOFwp1VQA4Nqd0RBdLNkaOkb/uJVjFtyNu78fx06JfCbI=</HostId>
</Error>
The issue is not your code but your method of authentication or region.
I ran your code sample successfully:
import boto3

session = boto3.session.Session(profile_name="<my-profile>")
client = session.client('s3')

BUCKET_NAME = "<bucket>"
file_name = "<file>"

url = client.generate_presigned_url(
    'get_object',
    Params={'Bucket': BUCKET_NAME,
            'Key': f"products/{file_name}"},
    ExpiresIn=3600
)
print(url)
It worked fine because the region of my bucket matched the region of my credentials. When I tried to generate a presigned URL from another region, I got the same error as yours:
<Error>
  <Code>InvalidRequest</Code>
  <Message>The authorization mechanism you have provided is not supported. Please use AWS4-HMAC-SHA256.</Message>
  <RequestId>JJPZYSMZTC7Z8H</RequestId>
  <HostId>TsgdZIibKxZ4GVL3h28OJYIvh59yfgeZwVf+eGPXVEzIJsAxdp1VQL67vw20LR/r9uIBxro=</HostId>
</Error>
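In other words, pointing the client at the bucket's region (and, if needed, forcing Signature Version 4) should produce a URL the bucket accepts. A sketch along those lines; the region and file name below are placeholders, not values from the question:

import boto3
from botocore.config import Config

BUCKET_NAME = "tayibat-files"   # bucket name from the question
file_name = "example.jpg"       # placeholder

# region_name is an assumption: use the region the bucket actually lives in.
s3 = boto3.client(
    "s3",
    region_name="us-east-1",
    config=Config(signature_version="s3v4"),
    aws_access_key_id="...",
    aws_secret_access_key="..."
)
url = s3.generate_presigned_url(
    'get_object',
    Params={'Bucket': BUCKET_NAME, 'Key': f"products/{file_name}"},
    ExpiresIn=3600
)
print(url)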
Hi, I am learning how to use AWS Lambda functions and I don't understand how to use the lambda handler. When I run this code, I get the expected output in the function logs:
import boto3

session = boto3.Session(
    aws_access_key_id='XXXXXXXXXXXXX',
    aws_secret_access_key='XXXXXXXXXXXXXXX')

# Then use the session to get the resource
s3 = session.resource('s3')

my_bucket = s3.Bucket('XXXXX')

for my_bucket_object in my_bucket.objects.all():
    print(my_bucket_object.key)
But when I add the lambda_handler, it doesn't work:
import boto3

def lambda_handler(event, context):
    session = boto3.Session(
        aws_access_key_id='XXXXXXXXXXXXX',
        aws_secret_access_key='XXXXXXXXXXXX')

    # Then use the session to get the resource
    s3 = session.resource('s3')

    my_bucket = s3.Bucket('XXXXXX')

    for my_bucket_object in my_bucket.objects.all():
        print(my_bucket_object.key)
In this case I received the response:
{
    "errorMessage": "2022-05-10T14:50:10.023Z a840a005-9af0-4827-919a-7e2bd7eb0aae Task timed out after 3.02 seconds"
}
If anyone has knowledge of what I am doing wrong I would appreciate it.
Try this without any security settings on an S3 bucket that you make public for testing purposes. You can then run/test it from your local machine to help with debugging before you deploy it.
import boto3

def list_file_in_bucket(event, context):
    bucket_name = event['bucket_name']
    s3 = boto3.resource('s3')
    my_bucket = s3.Bucket(bucket_name)
    for my_bucket_object in my_bucket.objects.all():
        print(my_bucket_object.key)
Tested with:

# 'function' here is the module that defines list_file_in_bucket
def test_s3():
    test_bucket = 'public-test-bucket'
    event = {}
    event['bucket_name'] = test_bucket
    function.list_file_in_bucket(event, {})
    assert True == False
Obviously you want to change the assert.
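One more thing worth noting (an observation from the error text, not something I can verify in your account): "Task timed out after 3.02 seconds" matches Lambda's default 3-second timeout, so if the bucket listing is simply slow the function may also need a longer timeout. A sketch of raising it with boto3; the function name is a placeholder:

import boto3

# Raise the function timeout to 30 seconds (function name is a placeholder)
lambda_client = boto3.client('lambda')
lambda_client.update_function_configuration(
    FunctionName='my-function',
    Timeout=30
)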
I am using code similar to the below to trigger an AWS Lambda function on my AWS Educate account. When I run it, nothing triggers on the Lambda (the Lambda works with the same payload through the test configuration). My session and permissions are also correct, as I am able to use boto3 to access S3 resources with the same credentials. What can I try in order to fix or troubleshoot this issue?
Apologies if this is vague (I know it is), but I am very confused about why this is happening.
import boto3
import json

AWS_ACCESS_KEY_ID = "XXXXXXXXXXXXXXXXXX"
AWS_SECRET_ACCESS_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
REGION = 'us-east-1'
session = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"

payload = json.dumps({"function": "tweets", "amount": 10, "time": 10})

client = boto3.client('lambda',
                      region_name=REGION,
                      aws_access_key_id=AWS_ACCESS_KEY_ID,
                      aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                      aws_session_token=session)

response = client.invoke(
    FunctionName="MyFunctionARN",
    InvocationType="RequestResponse",
    Payload=payload
)
Every Lambda function has a handler function that is the entry point for the code. By default it is lambda_handler. You can also change the default handler function under Runtime settings. The following code will solve your problem:
import boto3
import json

AWS_ACCESS_KEY_ID = "XXXXXXXXXXXXXXXXXX"
AWS_SECRET_ACCESS_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
REGION = 'us-east-1'
session = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"

def lambda_handler(event, context):
    payload = json.dumps({"function": "tweets", "amount": 10, "time": 10})
    client = boto3.client('lambda',
                          region_name=REGION,
                          aws_access_key_id=AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                          aws_session_token=session)
    response = client.invoke(
        FunctionName="MyFunctionARN",
        InvocationType="RequestResponse",
        Payload=payload
    )
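If you also need the invoked function's result in the caller, the invoke response includes a Payload stream that can be read and decoded. A sketch of how the end of lambda_handler could look (this continues the code above and is an addition, not part of the original answer):

    # inside lambda_handler, after client.invoke(...)
    result = json.loads(response['Payload'].read())
    print(result)
    return result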
Using the Azure Python SDK, I would like to create a KeyVaultClient via the get_client_from_auth_file method in order to get secrets from a Key Vault without going through a KeyVaultManagementClient.
According to the documentation, it appears to be possible to create a client from any SDK client class.
I am able to do this:
from azure.common.client_factory import get_client_from_auth_file
from azure.mgmt.keyvault import KeyVaultManagementClient
_kv_mgmt_client = get_client_from_auth_file(KeyVaultManagementClient)
but not this:
from azure.common.client_factory import get_client_from_auth_file
from azure.keyvault import KeyVaultClient
_kv_client = get_client_from_auth_file(KeyVaultClient)
This is the error message: TypeError: __init__() got an unexpected keyword argument 'base_url'
Update:
Upon review, get_client_from_auth_file passes several keyword arguments, including base_url, to the client constructor, so the following helper class works around the TypeError.
class KeyVaultClientHelper:
    def __init__(self, credentials, **kwargs):
        self._credentials = credentials
The KeyVaultClient built this way works until it tries to get a secret, at which point it returns Unauthorized:
helper = get_client_from_auth_file(KeyVaultClientHelper)
client = KeyVaultClient(helper._credentials)
print(client.get_secret("http://my-vault-url...", "MY-KEY", '').value)
However, I am able to get secrets using ServicePrincipalCredentials with the same auth file.
This was a bug in azure-common, fixed in 1.1.22:
https://pypi.org/project/azure-common/1.1.22/
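Upgrading the package (for example, pip install --upgrade azure-common) should be enough to pick up the fix.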
Thanks!
Kristin,
you can try something like the below; it has a working sample for getting the Key Vault client:
import adal
from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
from azure.common.credentials import ServicePrincipalCredentials
from msrestazure.azure_active_directory import AADTokenCredentials
client_id = '<client_id>'
client_secret = '<client_secret>'
tenant = '<tenant>'
vault_address = '<vault_address>'
secret_name = '<secret_name>'
resource_uri = 'https://vault.azure.net'
def auth_with_adal(server, resource, scope):
    authority_host_uri = 'https://login.windows.net'
    authority_uri = authority_host_uri + '/' + tenant
    context = adal.AuthenticationContext(authority_uri, api_version=None)
    mgmt_token = context.acquire_token_with_client_credentials(resource_uri, client_id, client_secret)
    credentials = AADTokenCredentials(mgmt_token, client_id)
    token = credentials.token
    return token['token_type'], token['access_token']

def auth_with_spc(server, resource, scope):
    credentials = ServicePrincipalCredentials(
        client_id=client_id,
        secret=client_secret,
        tenant=tenant,
        resource=resource_uri
    )
    token = credentials.token
    return token['token_type'], token['access_token']

try:
    client = KeyVaultClient(KeyVaultAuthentication(auth_with_adal))
    secret_bundle = client.get_secret(vault_address, secret_name, '')
    print('1) I got the secret using AADTokenCredentials!')
except Exception as e:
    print('1) Failed to get a secret!')
    print(e)

try:
    client = KeyVaultClient(KeyVaultAuthentication(auth_with_spc))
    secret_bundle = client.get_secret(vault_address, secret_name, '')
    print('2) I got the secret using ServicePrincipalCredentials!')
except Exception as e:
    print('2) Failed to get a secret!')
    print(e)
You can use the call below to achieve it:
client = KeyVaultClient(KeyVaultAuthentication(auth_with_spc))
Hope it helps.