RequestTimeTooSkewed: boto3 upload sometimes fails when run in thread - python

I often want to upload files to Amazon S3 in the background. I'm using threads for this.
Sometimes, but not always, this results in a RequestTimeTooSkewed error.
import threading
from pathlib import Path

import boto3

def upload_file_s3(
    local_path: Path,
    s3_path: Path,
    bucket: str,
) -> None:
    ACCESS_KEY = ""
    SECRET_KEY = ""
    client = boto3.client(
        "s3",
        aws_access_key_id=ACCESS_KEY,
        aws_secret_access_key=SECRET_KEY,
        region_name="eu-central-1",
        use_ssl=False,
        verify=False,
    )
    client.upload_file(str(local_path), bucket, str(s3_path))

local_path = Path('/tmp/testfile')
s3_path = Path('testfile')
bucket = 'my_bucket'
thread = threading.Thread(target=upload_file_s3, args=(local_path, s3_path, bucket))
thread.start()
botocore.exceptions.ClientError: An error occurred (RequestTimeTooSkewed) when calling the UploadPart operation: The difference between the request time and the current time is too large.
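For context on the error itself: RequestTimeTooSkewed means the timestamp signed into the request differs from AWS's server time by more than the allowed window (about 15 minutes), so the first thing to check is the machine's system clock. As a hedged sketch (assuming the skew is transient rather than a permanently wrong clock), one mitigation sometimes suggested is to give botocore more retry attempts, since clock-skew errors are classified as retryable and the SDK re-signs the request on each retry:

import boto3
from botocore.config import Config

# A minimal sketch, not a confirmed fix for this exact setup: allow more
# retries so the request can be re-signed after a clock-skew rejection.
config = Config(retries={"mode": "standard", "max_attempts": 10})
client = boto3.client("s3", config=config, region_name="eu-central-1")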

Related

Boto3 S3 list_objects_v2 Not Returning Any Objects

I'm using Boto3 to try to get a list of keys from an S3 bucket via an AWS Lambda Python script. No matter what I try, the bucket returns no objects.
import json, boto3, os

def getConfig():
    cfg = {
        "aws_key_id": os.getenv("AWS_KEY_ID", ""),
        "aws_secret": os.getenv("AWS_SECRET", ""),
    }
    return cfg

def lambda_handler(event, context):
    cfg = getConfig()
    bucket_name = "zachs-taxi"
    session = boto3.Session(
        aws_access_key_id=cfg.get('aws_key_id'),
        aws_secret_access_key=cfg.get('aws_secret')
    )
    s3 = session.client('s3')
I've tried both of the following but both return empty:
response = s3.list_objects_v2(Bucket=bucket_name)
for content in response.get('Contents', []):
    print(content['Key'])
And
paginator = s3.get_paginator("list_objects_v2")
for page in paginator.paginate(Bucket=bucket_name):
    for content in page.get('Contents', ()):
        print(content['Key'])
The S3 bucket is public and I can access it. Inside there is a folder called content and within that folder is a .png file.
Any help would be appreciated. Thanks!
Your code ran perfectly fine for me (with a different bucket name) when I ran it on my own computer:
import boto3

bucket_name = "my-bucketname"
s3 = boto3.client('s3')
response = s3.list_objects_v2(Bucket=bucket_name)
for content in response.get('Contents', []):
    print(content['Key'])
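If the same code works locally but returns nothing in Lambda, a hedged debugging step (a sketch; AWS_KEY_ID and AWS_SECRET are the variable names the question's getConfig() reads, and may simply be unset in the Lambda environment) is to confirm which identity the calls are actually made with:

import os
import boto3

# Sketch: check whether the env vars getConfig() reads are set in Lambda.
# (Print presence only, never the secret itself.)
print("AWS_KEY_ID set:", bool(os.getenv("AWS_KEY_ID")))
print("AWS_SECRET set:", bool(os.getenv("AWS_SECRET")))

# Confirm which principal the SDK is really calling as.
sts = boto3.client("sts")
print(sts.get_caller_identity()["Arn"])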

How can I remove expiry from the S3 bucket image object URL?

I am uploading multiple images to an S3 bucket, but once the images are attached and I receive the image URLs, they have a certain expiry date. I don't want them to expire at all. What should I do?
Python code:
from werkzeug.utils import secure_filename

url_attach = []
image_file = request.files.getlist('files')
for item in image_file:
    filename = secure_filename(item.filename)
    url = upload2s3(item, filename)
    url_attach.append(url)
upload function:
def upload2s3(img_content, key_name):
    try:
        s3_conn = boto3.client(
            "s3",
            aws_access_key_id=AWS_ACCESS_KEY_ID,
            aws_secret_access_key=AWS_SECRET_KEY,
        )
        x = s3_conn.put_object(Bucket=BUCKET_NAME, Body=img_content.read(), Key=key_name)
        url = create_url(BUCKET_NAME, key_name)
        return url
    except Exception as ex:
        return {"status": False, "message": ex}
url function:
def create_url(bucket, object):
    client = boto3.client(
        "s3", aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_KEY
    )
    return client.generate_presigned_url(
        "get_object", Params={"Bucket": bucket, "Key": object}
    )
sample url: https://test_dir.s3.amazonaws.com/XYZ.png?AWSAccessKeyId=###########&Signature=##############&Expires=#########
Somewhere I read that generate_presigned_url has a maximum expiry of 7 days. Is there any alternative to that?
I don't want them to expire at all. What should I do?
Nothing, because it's not possible. You would have to develop a custom solution that regenerates new links when they are about to expire. Otherwise, don't use S3 presigned URLs at all; instead, serve your files through CloudFront.
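For completeness, a minimal sketch of the "regenerate before expiry" approach described above (fresh_url is a hypothetical helper name; scheduling the refresh is up to you, and 604800 seconds is the 7-day SigV4 cap mentioned in the question):

import boto3

s3 = boto3.client("s3")

# Sketch: issue URLs at the maximum lifetime, and have callers re-request
# them from this helper before the old link lapses.
def fresh_url(bucket, key, expires_in=604800):
    return s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": bucket, "Key": key},
        ExpiresIn=expires_in,
    )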

"error message" with lambda handler in aws lambda

Hi, I am learning how to use AWS Lambda functions and I don't understand how to use the lambda handler. When I run this code, I get the expected output in the function logs.
import boto3

session = boto3.Session(
    aws_access_key_id='XXXXXXXXXXXXX',
    aws_secret_access_key='XXXXXXXXXXXXXXX')

# Then use the session to get the resource
s3 = session.resource('s3')
my_bucket = s3.Bucket('XXXXX')
for my_bucket_object in my_bucket.objects.all():
    print(my_bucket_object.key)
But when I add the lambda_handler, it doesn't work:
import boto3

def lambda_handler(event, context):
    session = boto3.Session(
        aws_access_key_id='XXXXXXXXXXXXX',
        aws_secret_access_key='XXXXXXXXXXXX')

    # Then use the session to get the resource
    s3 = session.resource('s3')
    my_bucket = s3.Bucket('XXXXXX')
    for my_bucket_object in my_bucket.objects.all():
        print(my_bucket_object.key)
In this case I received the following response:
{
  "errorMessage": "2022-05-10T14:50:10.023Z a840a005-9af0-4827-919a-7e2bd7eb0aae Task timed out after 3.02 seconds"
}
If anyone has knowledge of what I am doing wrong I would appreciate it.
Try this without any security settings, on an S3 bucket that you make public for testing purposes. You can then run/test it from your local machine to help with debugging before you deploy it.
import boto3

def list_file_in_bucket(event, context):
    bucket_name = event['bucket_name']
    s3 = boto3.resource('s3')
    my_bucket = s3.Bucket(bucket_name)
    for my_bucket_object in my_bucket.objects.all():
        print(my_bucket_object.key)
Tested with
def test_s3():
    test_bucket = 'public-test-bucket'
    event = {}
    event['bucket_name'] = test_bucket
    function.list_file_in_bucket(event, {})
    assert True == False
Obviously you want to change the assert.
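One more observation on the original error: "Task timed out after 3.02 seconds" is Lambda's default 3-second timeout being hit (typically because the function is stuck waiting on the network), not a syntax problem. A hedged sketch of raising the timeout programmatically; "my-function" is a placeholder name, and the same setting lives under Configuration > General configuration in the console:

import boto3

lam = boto3.client("lambda")
# Raise the function timeout to 30 seconds ("my-function" is a placeholder).
lam.update_function_configuration(FunctionName="my-function", Timeout=30)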

Boto3 AWS lambda not triggering

I am using code similar to the below to trigger an AWS Lambda function on my AWS Educate account. When I run it, nothing triggers on the Lambda (the Lambda works with the same payload through the test configuration). My session and permissions are also correct, as I am able to use boto3 to access S3 resources with the same credentials. What can I try in order to fix or troubleshoot this issue?
Apologies if this is vague (I know it is), but I am very confused about why this is happening.
import boto3
import json

AWS_ACCESS_KEY_ID = "XXXXXXXXXXXXXXXXXX"
AWS_SECRET_ACCESS_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
REGION = 'us-east-1'
session = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"

payload = json.dumps({"function": "tweets", "amount": 10, "time": 10})

client = boto3.client('lambda',
                      region_name=REGION,
                      aws_access_key_id=AWS_ACCESS_KEY_ID,
                      aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                      aws_session_token=session)
response = client.invoke(
    FunctionName="MyFunctionARN",
    InvocationType="RequestResponse",
    Payload=payload
)
Every Lambda function has a handler function, which is the entry point for the code. By default it is lambda_handler. You can also change the default handler function under Runtime settings. The following code should solve your problem.
import boto3
import json

AWS_ACCESS_KEY_ID = "XXXXXXXXXXXXXXXXXX"
AWS_SECRET_ACCESS_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
REGION = 'us-east-1'
session = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"

def lambda_handler(event, context):
    payload = json.dumps({"function": "tweets", "amount": 10, "time": 10})
    client = boto3.client('lambda',
                          region_name=REGION,
                          aws_access_key_id=AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                          aws_session_token=session)
    response = client.invoke(
        FunctionName="MyFunctionARN",
        InvocationType="RequestResponse",
        Payload=payload
    )
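Whichever side of the handler the invoking code ends up on, a hedged debugging step for "nothing triggers": with InvocationType="RequestResponse" the invoke call is synchronous, so the response object from the questioner's original script already says whether the function ran and whether it raised. A sketch, to be placed right after the client.invoke(...) call:

# Sketch: inspect the synchronous invoke response from the code above.
# StatusCode 200 means the function was invoked; FunctionError is present
# only if the function raised.
print(response["StatusCode"], response.get("FunctionError"))
print(response["Payload"].read().decode())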

How to generate url from boto3 in amazon web services

I have a bucket in S3 and I am trying to pull the URL of an image that is in there.
I am using boto3, and boto3 doesn't seem to have an implemented generate-URL method.
There is a core botocore method that generates a URL like this:
import botocore.session

session = botocore.session.get_session()
client = session.create_client('s3')
presigned_url = client.generate_presigned_url(
    'get_object', Params={'Bucket': self.bucket_name, 'Key': self.key})
One thing I am forced to do is send the parameters along with each request using the session object, and the above method does not allow me to set the session variables (i.e. AWS credentials).
The closest I can get is this
from boto3.session import Session

session = Session(aws_access_key_id='342342342342',
                  aws_secret_access_key='3434234322',
                  region_name='us-east-1')
s3 = session.resource('s3')
object = s3.Object('my-dev-bucket', 'amazonKeyString')
print(object.get()["Body"])
This gets me an Amazon S3 object, which prints as
<botocore.response.StreamingBody object at 0x7ffaff8cef50>
Can I get a url of the image this way?
I was able to get results and did not face any issues getting the signed URL.
I used the default session, since my AWS creds were stored locally in the ~/.aws/credentials file and my default region is set as needed in ~/.aws/config.
import boto3

s3Client = boto3.client('s3')
s3Client.generate_presigned_url('get_object',
                                Params={'Bucket': 'www.mybucket.com', 'Key': 'hello.txt'},
                                ExpiresIn=100)
If you need to pass params for the Session, import boto3.session and create a custom session:
import boto3.session

session = boto3.session.Session(region_name='eu-central-1')
s3Client = session.client('s3')
If you don't want to use the aws configure command, you can pass the credentials directly like this and generate the presigned URL:
import boto3
from botocore.config import Config

def generate_public_url(bucket_name, file_name, aws_region, aws_key_id, aws_secret, timeout=300):
    # if not object_exists(bucket_name, file_name):
    #     raise Exception(f"0 or many items found in bucket '{bucket_name}' with key '{file_name}')")
    s3_client = boto3.client('s3', config=Config(signature_version='s3v4'),
                             region_name=aws_region, aws_access_key_id=aws_key_id,
                             aws_secret_access_key=aws_secret)
    url = s3_client.generate_presigned_url(
        ClientMethod='get_object',
        Params={
            'Bucket': bucket_name,
            'Key': file_name
        },
        ExpiresIn=timeout  # seconds
    )
    return url
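A hypothetical call to the helper above (bucket, key, region, and credentials are all placeholders):

# Hypothetical usage of generate_public_url; every argument is a placeholder.
url = generate_public_url("my-dev-bucket", "XYZ.png", "us-east-1",
                          "AKIA...", "secret...", timeout=3600)
print(url)  # the link stays valid for one hour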
