Get all available buckets and print only the bucket name - Python

I'm showing all available buckets with the code below, and I'm getting this result:
<Bucket: test>
But is it possible to get only this result (without <Bucket ...>), like this:
test
import boto
from boto.s3.connection import S3Connection

s3 = boto.connect_s3()
buckets = s3.get_all_buckets()
for key in buckets:
    print key

import boto
from boto.s3.connection import S3Connection

s3 = boto.connect_s3()
buckets = s3.get_all_buckets()
for key in buckets:
    print key.name
This should work: key.name.
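For completeness, if you are on boto3 rather than the legacy boto library, a minimal sketch that prints only the bucket names (assuming your default credentials are already configured) looks like this:
import boto3

# Print only the bucket names, using the boto3 client API
s3 = boto3.client('s3')
for bucket in s3.list_buckets()['Buckets']:
    print(bucket['Name'])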

I wrote up this sample code today to test out a few things; you may find it helpful as well. This assumes that you have authorization to call the S3 API or to list the specific bucket:
import boto3
import time
import sys
from botocore.exceptions import ClientError

print("S3 Listing at %s" % time.ctime())
s3 = boto3.client('s3')

def showSingleBucket(bucketName):
    "Displays the contents of a single bucket"
    if len(bucketName) == 0:
        print("bucket name not provided, listing all buckets....")
        time.sleep(8)
    else:
        print("Bucket Name provided is: %s" % bucketName)
        s3bucket = boto3.resource('s3')
        my_bucket = s3bucket.Bucket(bucketName)
        for object in my_bucket.objects.all():
            print(object.key)
    return

def showAllBuckets():
    "Displays the contents of S3 for the current account"
    try:
        # Call S3 to list current buckets
        response = s3.list_buckets()
        for bucket in response['Buckets']:
            print(bucket['Name'])
    except ClientError as e:
        print("The bucket does not exist, choose how to deal with it or raise the exception: " + str(e))
    return

if len(sys.argv[1:]) != 0:
    showSingleBucket(''.join(sys.argv[1]))
else:
    showAllBuckets()

Related

Listing objects from each and every bucket present in my S3

I have 5 buckets in my S3. I have to list the objects of every bucket present in my S3 with a Python script. I am writing the script something like this:
import boto3

def lambda_handler(event, context):
    s3 = boto3.client('s3')
    response = s3.list_buckets()
    print('Existing buckets:')
    for bucket in response['Buckets']:
        for obj in bucket.object.all(['bucket']):
            response = obj.get(
                Key, StorageClass, Size)
            print(response)
You can check the following code:
import boto3

s3 = boto3.client('s3')
s3r = boto3.resource('s3')

def lambda_handler(event, context):
    response = s3.list_buckets()
    for bucket_info in response['Buckets']:
        bucket = s3r.Bucket(bucket_info['Name'])
        print('Existing buckets:', bucket_info['Name'])
        for object in bucket.objects.all():
            print(' - ', object.key)
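If you prefer to stay with the client API only, here is a minimal sketch of the same listing using list_objects_v2 with a paginator, so buckets with more than 1000 objects are fully listed; it assumes the default credentials can list every bucket:
import boto3

s3 = boto3.client('s3')
paginator = s3.get_paginator('list_objects_v2')

for bucket_info in s3.list_buckets()['Buckets']:
    print('Existing bucket:', bucket_info['Name'])
    # Paginate so buckets with more than 1000 objects are fully listed
    for page in paginator.paginate(Bucket=bucket_info['Name']):
        for obj in page.get('Contents', []):
            print(' - ', obj['Key'])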

Cannot upload S3 files to another region (client's bucket) despite successful response

This is my code. I am trying to copy a directory from one bucket to another. Everything looks positive, but the files are not appearing in the client's bucket.
import boto3

ACCESS_KEY = 'access_key'
SECRET_KEY = 'secret_key'
REGION_NAME = 'US_EAST_1'

source_bucket = 'source_bucket'
#Make sure you provide / in the end
source_prefix = 'source_prefix'
target_bucket = 'target-bucket'
target_prefix = 'target-prefix'

client = boto3.client('s3')
session_src = boto3.session.Session()
source_s3_r = session_src.resource('s3')

def get_s3_keys(bucket, prefix):
    keys = []
    response = client.list_objects_v2(Bucket=bucket, Prefix=prefix, MaxKeys=100)
    for obj in response['Contents']:
        keys.append(obj['Key'])
    return keys

session_dest = boto3.session.Session(aws_access_key_id=ACCESS_KEY,
                                     aws_secret_access_key=SECRET_KEY)
dest_s3_r = session_dest.resource('s3')

# create a reference to source image
old_obj = source_s3_r.Object(source_bucket, source_prefix)
# create a reference for destination image
new_obj = dest_s3_r.Object(target_bucket, target_prefix)

keys = get_s3_keys(source_bucket, source_prefix)
responses = []

# upload the image to destination S3 object
for filename in keys:
    print("Transferring file {}, {}".format(source_bucket, filename))
    old_obj = source_s3_r.Object(source_bucket, filename)
    response = new_obj.put(Body=old_obj.get()['Body'].read())
    response_code = response['ResponseMetadata']['HTTPStatusCode']
    responses.append(response_code)
    print("File transfer response {}".format(response_code))

distinct_response = list(set(responses))
if len(distinct_response) > 1 or distinct_response[0] != 200:
    print("File could not be transfered to krux bucket. Exiting now")
    exit(1)
else:
    print("File transfer to krux bucket successful")
I am getting a successful response code of 200 but the file is not transferred across.
Srinivas, try this. I used the S3 Resource object; try the equivalent S3 Client if you want...
import boto3

s3 = boto3.resource('s3')
bucket_name = 'your-source-bucket'  # placeholder for the source bucket name

bucket = s3.Bucket(bucket_name)  # from_bucket
for osi in bucket.objects.all():
    print(osi)
    copy_source = {
        'Bucket': bucket.name,
        'Key': osi.key
    }
    s3.Bucket('to_bucket').copy(copy_source, osi.key)
Hope it helps..
r0ck
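Since the answer mentions an equivalent S3 Client route, here is a minimal sketch of the same copy using the client's copy_object call; the bucket names are placeholders, not from the question:
import boto3

client = boto3.client('s3')
source_bucket = 'source-bucket-name'   # placeholder
target_bucket = 'target-bucket-name'   # placeholder

# List the source keys and copy each one server-side
paginator = client.get_paginator('list_objects_v2')
for page in paginator.paginate(Bucket=source_bucket):
    for obj in page.get('Contents', []):
        client.copy_object(
            Bucket=target_bucket,
            Key=obj['Key'],
            CopySource={'Bucket': source_bucket, 'Key': obj['Key']}
        )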

How to check whether an S3 bucket has tags or not

I tried to check whether the existing S3 buckets have tags or not; if a bucket does not have tags, I will add them. I tried the code below:
for region in region_list:
    s3 = boto3.resource('s3', region)
    s3_client = boto3.client('s3', region)
    for bucket in s3.buckets.all():
        s3_bucket = bucket
        s3_bucket_name = s3_bucket.name
        response = s3_client.get_bucket_tagging(Bucket=s3_bucket_name)
        tagset = response['TagSet']
        if len(response['TagSet']) == 0:
            print "s3 bucket not have tags, adding tags"
        else:
            pass
but I am getting the error below:
Traceback (most recent call last):
  File "C:\Python27\ec2info.py", line 235, in <module>
    response = s3_client.get_bucket_tagging(Bucket=s3_bucket_name)
  File "C:\Python27\lib\site-packages\botocore\client.py", line 314, in _api_call
    return self._make_api_call(operation_name, kwargs)
  File "C:\Python27\lib\site-packages\botocore\client.py", line 612, in _make_api_call
    raise error_class(parsed_response, operation_name)
ClientError: An error occurred (NoSuchTagSet) when calling the GetBucketTagging operation: The TagSet does not exist
Where am I going wrong here? What is the correct way to check whether an S3 bucket has tags or not?
Thanks in advance for your help.
get_bucket_tagging throws NoSuchTagSet when there are no tags, so catch the exception and create the tags there. Also, do not loop through regions: you will get all buckets irrespective of the region endpoint you connect to.
See: NoSuchTagSet when calling the GetBucketTagging operation
from botocore.exceptions import ClientError

for bucket in s3.buckets.all():
    s3_bucket = bucket
    s3_bucket_name = s3_bucket.name
    try:
        response = s3_client.get_bucket_tagging(Bucket=s3_bucket_name)
        #print response
        #tagset = response['TagSet']
    except ClientError:
        print s3_bucket_name, "does not have tags, adding tags"
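The answer says to create the tags inside the except branch; a minimal sketch of that step with put_bucket_tagging (the bucket name and the Owner tag here are just example placeholders, not from the question) could be:
import boto3
from botocore.exceptions import ClientError

s3_client = boto3.client('s3')
s3_bucket_name = 'mybucket'  # placeholder

try:
    s3_client.get_bucket_tagging(Bucket=s3_bucket_name)
except ClientError:
    # No TagSet yet: add a default tag (example key/value only)
    s3_client.put_bucket_tagging(
        Bucket=s3_bucket_name,
        Tagging={'TagSet': [{'Key': 'Owner', 'Value': 'unknown'}]}
    )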
To expand on helloV's correct answer, check for the specific error code as follows:
import boto3
from botocore.exceptions import ClientError

s3_client = boto3.client('s3')
bucket_name = 'mybucket'
try:
    response = s3_client.get_bucket_tagging(Bucket=bucket_name)
    tags = response["TagSet"]
except ClientError as e:
    if e.response['Error']['Code'] == 'NoSuchTagSet':
        tags = {}
    else:
        raise e
Here is the complete code for how you can do it:
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client('s3')
s3_re = boto3.resource('s3')

for bucket in s3_re.buckets.all():
    s3_bucket = bucket
    s3_bucket_name = s3_bucket.name
    bucket_tagging = s3_re.BucketTagging(s3_bucket_name)
    try:
        response = s3.get_bucket_tagging(Bucket=s3_bucket_name)
    except ClientError:
        print(s3_bucket_name + " does not have tags, add tag")
        print("give key : ")
        inp_key = input()
        print("give value : ")
        inp_val = input()
        response = bucket_tagging.put(
            Tagging={
                'TagSet': [
                    {
                        'Key': inp_key,
                        'Value': inp_val
                    },
                ]
            }
        )
Hope this code helps to keep track of your S3 tags:
#s3 Buckets
import boto3
from botocore.exceptions import ClientError

s3_client = boto3.client('s3')
dict_of_s3_buckets = s3_client.list_buckets()
list_of_s3_buckets = [each['Name'] for each in dict_of_s3_buckets['Buckets']]

i = 0
s3_bucket_tag_status = {}
while i < len(list_of_s3_buckets):
    s3_bucket_name = list_of_s3_buckets[i]
    try:
        response = s3_client.get_bucket_tagging(Bucket=s3_bucket_name)
        tags = response['TagSet']
        s3_bucket_tag_status[s3_bucket_name] = tags
    except ClientError:
        #print(s3_bucket_name, "does not have tags")
        no_tags = 'does not have tags'
        s3_bucket_tag_status[s3_bucket_name] = no_tags
    i += 1

#changing to pandas dataframe (if required)
import pandas as pd
s3_bucket_tags = pd.DataFrame.from_dict(s3_bucket_tag_status, orient='index').reset_index().rename(columns={'index': 'bucketName', 0: 'Tags'})
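If you want to keep the result, a hypothetical follow-up is to dump the DataFrame to a CSV file:
s3_bucket_tags.to_csv('s3_bucket_tags.csv', index=False)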

Downloading files from S3 recursively using boto in Python

I have a bucket in S3 with a deep directory structure. I wish I could download them all at once. My files look like this:
foo/bar/1 . .
foo/bar/100 . .
Is there any way to download these files recursively from the S3 bucket using the boto lib in Python?
Thanks in advance.
You can download all files in a bucket like this (untested):
import logging
from boto.s3.connection import S3Connection

conn = S3Connection('your-access-key', 'your-secret-key')
bucket = conn.get_bucket('bucket')
for key in bucket.list():
    try:
        res = key.get_contents_to_filename(key.name)
    except:
        logging.info(key.name + ":" + "FAILED")
Keep in mind that folders in S3 are simply another way of writing the key name, and only clients will show them as folders.
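Because of that, the local directory has to be created from the key name before downloading; a small sketch of that idea (untested, using the same access-key style connection as above) would be:
import os
from boto.s3.connection import S3Connection

conn = S3Connection('your-access-key', 'your-secret-key')
bucket = conn.get_bucket('bucket')
for key in bucket.list():
    # Derive the local directory from the key name and create it first
    local_dir = os.path.dirname(key.name)
    if local_dir and not os.path.exists(local_dir):
        os.makedirs(local_dir)
    # Keys that end in '/' are just "folder" placeholders, skip them
    if not key.name.endswith('/'):
        key.get_contents_to_filename(key.name)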
#!/usr/bin/env python

import boto
import sys, os
from boto.s3.key import Key
from boto.exception import S3ResponseError

DOWNLOAD_LOCATION_PATH = os.path.expanduser("~") + "/s3-backup/"
if not os.path.exists(DOWNLOAD_LOCATION_PATH):
    print ("Making download directory")
    os.mkdir(DOWNLOAD_LOCATION_PATH)

def backup_s3_folder():
    BUCKET_NAME = "your-bucket-name"
    AWS_ACCESS_KEY_ID = os.getenv("AWS_KEY_ID")  # set your AWS_KEY_ID in your environment
    AWS_ACCESS_SECRET_KEY = os.getenv("AWS_ACCESS_KEY")  # set your AWS_ACCESS_KEY in your environment
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_ACCESS_SECRET_KEY)
    bucket = conn.get_bucket(BUCKET_NAME)

    # go through the list of files
    bucket_list = bucket.list()
    for l in bucket_list:
        key_string = str(l.key)
        s3_path = DOWNLOAD_LOCATION_PATH + key_string
        try:
            print ("Current File is ", s3_path)
            l.get_contents_to_filename(s3_path)
        except (OSError, S3ResponseError) as e:
            # check if the file has been downloaded locally
            if not os.path.exists(s3_path):
                try:
                    os.makedirs(s3_path)
                except OSError as exc:
                    # guard against race conditions
                    import errno
                    if exc.errno != errno.EEXIST:
                        raise

if __name__ == '__main__':
    backup_s3_folder()
import boto, os

LOCAL_PATH = 'tmp/'
AWS_ACCESS_KEY_ID = 'YOUR_AWS_ACCESS_KEY_ID'
AWS_SECRET_ACCESS_KEY = 'YOUR_AWS_SECRET_ACCESS_KEY'
bucket_name = 'your_bucket_name'

# connect to the bucket
conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucket = conn.get_bucket(bucket_name)

# go through the list of files
bucket_list = bucket.list()
for l in bucket_list:
    keyString = str(l.key)
    d = LOCAL_PATH + keyString
    try:
        l.get_contents_to_filename(d)
    except OSError:
        # check if dir exists
        if not os.path.exists(d):
            os.makedirs(d)  # creates dirs recursively
Just added the directory creation part to #j0nes' comment.
from boto.s3.connection import S3Connection
import os

conn = S3Connection('your-access-key', 'your-secret-key')
bucket = conn.get_bucket('bucket')
for key in bucket.list():
    print key.name
    if key.name.endswith('/'):
        if not os.path.exists('./' + key.name):
            os.makedirs('./' + key.name)
    else:
        res = key.get_contents_to_filename('./' + key.name)
This will download files to the current directory and will create directories when needed.
If you have more than 1000 files in the folder, you need to use a paginator to iterate through them:
import boto3
import os

# create the client object
client = boto3.client(
    's3',
    aws_access_key_id=S3_ACCESS_KEY,
    aws_secret_access_key=S3_SECRET_KEY
)

# bucket and folder urls
bucket = 'bucket-name'
data_key = 'key/to/data/'

paginator = client.get_paginator("list_objects_v2")
for page in paginator.paginate(Bucket=bucket, Prefix=data_key):
    for obj in page['Contents']:
        key = obj['Key']
        tmp_dir = '/'.join(key.split('/')[0:-1])
        # create the local directory if it does not exist yet
        if not os.path.exists(tmp_dir):
            os.makedirs(tmp_dir)
        client.download_file(bucket, key, tmp_dir + '/' + key.split('/')[-1])
import boto
from boto.s3.key import Key

keyId = 'YOUR_AWS_ACCESS_KEY_ID'
sKeyId = 'YOUR_AWS_SECRET_ACCESS_KEY'
bucketName = 'your_bucket_name'

conn = boto.connect_s3(keyId, sKeyId)
bucket = conn.get_bucket(bucketName)

for key in bucket.list():
    print ">>>>>" + key.name
    pathV = key.name.split('/')
    if pathV[0] == "data":
        if pathV[1] != "":
            srcFileName = key.name
            filename = key.name
            filename = filename.split('/')[1]
            destFileName = "model/data/" + filename
            k = Key(bucket, srcFileName)
            k.get_contents_to_filename(destFileName)
    elif pathV[0] == "nlu_data":
        if pathV[1] != "":
            srcFileName = key.name
            filename = key.name
            filename = filename.split('/')[1]
            destFileName = "model/nlu_data/" + filename
            k = Key(bucket, srcFileName)
            k.get_contents_to_filename(destFileName)

Python S3 using boto says "AttributeError: 'str' object has no attribute 'connection'"

I have a connection that works, as I can list buckets, but I am having issues when trying to add an object.
conn = S3Connection(awskey, awssecret)
key = Key(mybucket)
key.key = p.sku
key.set_contents_from_filename(fullpathtofile)
I get the error:
AttributeError: 'str' object has no attribute 'connection'
The error is in the file:
/usr/local/lib/python2.6/dist-package/boto-2.obl-py2.6.egg/boto/s3/key.py, line 539
Just replace:
key = Key(mybucket)
with:
mybucket = "foo"
bucketobj = conn.get_bucket(mybucket)
mykey = Key(bucketobj)
Expanding on sth's comment: you can't pass a string; it needs to be a bucket object.
Key expects a bucket object as its first parameter (possibly created by conn.create_bucket()).
It looks like mybucket isn't a bucket, but a string, so the call fails.
Here's how I would do this:
import boto
s3 = boto.connect_s3()
bucket = s3.get_bucket("mybucketname")
key = bucket.new_key("mynewkeyname")
key.set_contents_from_filename('path_to_local_file', policy='public-read')
Mitch
import os
import boto.s3.connection

accessKeyId = 'YOUR_AWS_ACCESS_KEY_ID'
secretKey = 'YOUR_AWS_SECRET_KEY_ID'
host = 'HOST'

S3 = boto.connect_s3(
    aws_access_key_id=accessKeyId,
    aws_secret_access_key=secretKey,
    host=host,
    port=PORT,
    calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)

def upload_objects():
    try:
        bucket_name = "bucket name"  # s3 bucket name
        root_path = 'model/'  # local folder for upload
        my_bucket = S3.get_bucket(bucket_name)
        for path, subdirs, files in os.walk(root_path):
            path = path.replace("\\", "/")
            directory_name = path.replace(root_path, "")
            for file in files:
                if file != ".DS_Store":
                    full_key_name = os.path.join(path, file)
                    k = my_bucket.new_key(full_key_name)
                    k.set_contents_from_filename('/model/' + directory_name + '/' + file)
    except Exception as err:
        print(err)

upload_objects()
import boto3
s3 = boto3.resource('s3')
mybucket = s3.Bucket('mybucketName')
Now you will get the S3 bucket object; before, you were passing a string.
Enjoy!
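Since the original goal was to add an object, a minimal boto3 sketch for the upload step (the bucket name, key, and local path are placeholders) could be:
import boto3

s3 = boto3.resource('s3')
mybucket = s3.Bucket('mybucketName')  # placeholder bucket name

# Upload a local file under the given key
mybucket.upload_file('path_to_local_file', 'mynewkeyname')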
