Gmail API socket.timeout: The read operation timed out - Python

My program, which uses the Python Gmail API, has been encountering a lot of socket.timeout errors. I very frequently receive the following:
socket.timeout: The read operation timed out
This error appears to be random and generally occurs with any Gmail API function. I have tried modifying the socket timeout parameter, but changing it does not seem to resolve the issue; I have tried values of 1 second, 10 seconds, and 600 seconds.
socket.setdefaulttimeout(10)
Or for an httplib2.Http object:
def build_http(self):
    """Builds an httplib2.Http object.

    Returns:
        An httplib2.Http object, which is used to make http requests and
        which has a timeout set by default. To override the default timeout,
        call socket.setdefaulttimeout(timeout_in_sec) before interacting
        with this method.
    """
    try:
        return httplib2.Http(timeout=10)
    except:
        self.GLogger.error("An error was encountered in build_http")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
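For reference, a timeout-configured Http object only takes effect if the service is actually built on top of it. A minimal sketch of that wiring (assuming the google_auth_httplib2 helper package is installed; creds is the credentials object produced by the initialization code below):

import httplib2
import google_auth_httplib2
from googleapiclient.discovery import build

# creds: the credentials object from the initialization code below (assumption)
authed_http = google_auth_httplib2.AuthorizedHttp(creds, http=httplib2.Http(timeout=10))
service = build('gmail', 'v1', http=authed_http, cache_discovery=False)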
The following code is used to initialize the Gmail API service.
def gmailAPIInitialize(self):
    try:
        self.waitForInternet()
        self.GLogger.info("Initializing the Gmail API Service")
        creds = None
        # The file token.pickle stores the user's access and refresh tokens, and is
        # created automatically when the authorization flow completes for the first
        # time.
        if self.reprocess is True:
            token_pickle_file = 'Gmail_token_2.pickle'
            credentials_file = 'Gmail_credentials_2.json'
        else:
            token_pickle_file = 'Gmail_token_1.pickle'
            credentials_file = 'Gmail_credentials_1.json'
        if os.path.exists(token_pickle_file):
            with open(token_pickle_file, 'rb') as token:
                creds = pickle.load(token)
        # If there are no (valid) credentials available, let the user log in.
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    credentials_file, SCOPES)
                creds = flow.run_local_server(port=0)
            # Save the credentials for the next run
            with open(token_pickle_file, 'wb') as token:
                pickle.dump(creds, token)
        service = build('gmail', 'v1', credentials=creds, cache_discovery=False)
        self.gmailAPIService = service
        self.GLogger.info("Successfully initialized the Gmail API Service")
        return True
    except:
        self.GLogger.error("An error was encountered while attempting to initialize the Gmail API")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
Again, pretty much any function will trigger this issue, but here are some examples:
Thread List:
def gmailAPIMessageLabelSearchThreads(self, labelList, userID="me", allPages=False, reverseOrder=False):
    try:
        self.GLogger.info("Attempting to search email threads with labelList (" + str(labelList) + ") and userID (" + str(userID) + ")")
        service = self.gmailAPIService
        if service is None:
            logging.error('Gmail Service not initialized')
            return False
        response = service.users().threads().list(userId=userID, labelIds=labelList, maxResults=500, fields="threads(id),nextPageToken").execute()
        threads = []
        if 'threads' in response:  # threads().list returns a 'threads' key, not 'messages'
            threads.extend(response['threads'])
        if allPages is True:
            while 'nextPageToken' in response:
                page_token = response['nextPageToken']
                response = service.users().threads().list(userId=userID, labelIds=labelList, pageToken=page_token, maxResults=500, fields="threads(id),nextPageToken").execute()
                if 'threads' in response:
                    threads.extend(response['threads'])
        if reverseOrder is True:
            threads.reverse()
        self.GLogger.info("Successfully searched email threads with labelList (" + str(labelList) + ") and userID (" + str(userID) + "). Number of matching threads (" + str(len(threads)) + ")")
        return threads
    except:
        self.GLogger.error("An error was encountered while searching for threads with the Google API and label list")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
Send Email:
def gmailAPISendEmail(self, message, userID="me"):
    try:
        service = self.gmailAPIService
        self.GLogger.info("Attempting to send email message")
        response = service.users().messages().send(userId=userID, body=message).execute()
        responseID = str(response['id'])
        self.GLogger.info("Successfully sent email message with ID (" + responseID + ")")
        return responseID
    except:
        self.GLogger.error("Failed to send email message")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
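For context, the message argument here is the standard Gmail API body dict with a base64url-encoded MIME message under the "raw" key. A sketch of how such a dict can be built (addresses and text are placeholder values):

import base64
from email.mime.text import MIMEText

mime = MIMEText("Body text")             # placeholder body
mime["to"] = "recipient@example.com"     # placeholder address
mime["subject"] = "Test"
message = {"raw": base64.urlsafe_b64encode(mime.as_bytes()).decode()}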
Message List:
def gmailAPIMessageLabelSearch(self, labelList, userID="me", allPages=False, reverseOrder=False, numPages=None):
    try:
        self.GLogger.info("Attempting to search emails with labelList (" + str(labelList) + ") and userID (" + str(userID) + ")")
        service = self.gmailAPIService
        if service is None:
            logging.error('Gmail Service not initialized')
            return False
        response = service.users().messages().list(userId=userID, labelIds=labelList, maxResults=500, fields="messages(id),nextPageToken").execute()
        messages = []
        if 'messages' in response:
            messages.extend(response['messages'])
        numPages_Processed = 0
        if allPages is True:
            while 'nextPageToken' in response:
                page_token = response['nextPageToken']
                response = service.users().messages().list(userId=userID, labelIds=labelList, pageToken=page_token, maxResults=500, fields="messages(id),nextPageToken").execute()
                if 'messages' in response:
                    messages.extend(response['messages'])
                numPages_Processed = numPages_Processed + 1
                if (numPages is not None) and numPages_Processed >= numPages:
                    break
        if reverseOrder is True:
            messages.reverse()
        self.GLogger.info("Successfully searched emails with labelList (" + str(labelList) + ") and userID (" + str(userID) + "). Number of matching emails (" + str(len(messages)) + ")")
        listToReturn = list()
        for message in messages:
            listToReturn.append(message['id'])
        return listToReturn
    except:
        self.GLogger.error("An error was encountered while searching for messages with the Google API and label list")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
Attachment Download:
def gmailAPIDownloadAttachments(self, messageID, message=None, userID="me"):
    try:
        service = self.gmailAPIService
        self.GLogger.info("Attempting to download attachments from messageID (" + str(messageID) + ")")
        if message is None:
            message = self.gmailAPIGetFullMessage(messageID, userID=userID)
            if message is False:
                self.GLogger.error("Failed to extract message (" + str(messageID) + ") for downloading attachments")
                return False
        attachmentList = list()
        payload = message['payload']
        if 'parts' in payload:
            parts = payload['parts']
            for part in parts:
                if part['filename']:
                    if 'data' in part['body']:
                        data = part['body']['data']
                    else:
                        att_id = part['body']['attachmentId']
                        att = service.users().messages().attachments().get(userId=userID, messageId=messageID, id=att_id).execute()
                        data = att['data']
                    file_data = base64.urlsafe_b64decode(data.encode('UTF-8'))
                    filename = part['filename']
                    extSearch = filename.find('.')
                    if extSearch == -1:
                        ext = ""
                        partFileName = filename  # no extension; keep the whole name
                    else:
                        ext = filename[extSearch + 1:]
                        partFileName = filename[0:extSearch]
                    theAttachment = Attachment(filename, partFileName, ext, file_data)
                    attachmentList.append(theAttachment)
        self.GLogger.info("Successfully downloaded attachments from messageID (" + str(messageID) + ")")
        return attachmentList
    except:
        self.GLogger.error("Encountered an error while attempting to download email attachments from messageID (" + str(messageID) + ")")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
Batch requests, where searchResultParts is a list of lists and each inner list contains 100 message IDs:
for searchResultPart in searchResultParts:
    batch = service.new_batch_http_request(callback=self.theEmailCallback)
    for msgID in searchResultPart:  # Loop through each messageID
        request1 = service.users().messages().get(userId=userID, id=msgID)
        batch.add(request=request1, request_id=msgID)
    batch.execute(http=self.http_toUse)
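For reference, new_batch_http_request invokes the callback with (request_id, response, exception) for each sub-request. A sketch of what theEmailCallback looks like in that shape (the body is illustrative, not my exact handler):

def theEmailCallback(self, request_id, response, exception):
    # request_id is the msgID passed to batch.add(); exception is set
    # (e.g. an HttpError) if that sub-request failed, otherwise None.
    if exception is not None:
        self.GLogger.error("Batch item (" + str(request_id) + ") failed: " + str(exception))
    else:
        self.handleEmailResponse(response)  # hypothetical downstream handler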
There are several Python Gmail API functions that I use, and these are just a subset; however, all of them tend to produce the socket.timeout error at some point. Currently, my firewall is off (ufw is inactive).
I believe I am using these functions as intended, and I believe this issue lies with Google. Are there any Google team members here who could look into this? What can I do to resolve this issue? These socket.timeout errors occur so often that they are causing problems for my application.
My internet connection is fiber Gigabit for both upload and download.
Edit: I am now calling all of my Python Gmail API requests through this function. It waits 50 ms (plus another 50 ms per retry) and retries until the request succeeds or until 10 retries have been made. The socket.timeout error is still prevalent, but from my observations so far, most requests that hit socket.timeout go through after 2 or 3 retries.
def executeGmailAPI_withretry(self, request):
    try:
        response_valid = False
        num_retries = 0
        while num_retries < 10:
            try:
                response = request.execute()
                response_valid = True
                break
            except socket.timeout:
                num_retries = num_retries + 1
                time.sleep(0.05 * num_retries)
            except:
                self.GLogger.error("An error was encountered in executeGmailAPI_withretry")
                tb = traceback.format_exc()
                self.GLogger.exception(tb)
                num_retries = num_retries + 1
                time.sleep(0.05 * num_retries)
        if response_valid is False:
            return False
        else:
            return response
    except:
        self.GLogger.error("An error was encountered in executeGmailAPI_withretry")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
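A call site then looks like this (a sketch with placeholder label values; the request object is built as usual but not executed directly):

request = self.gmailAPIService.users().messages().list(
    userId="me", labelIds=["INBOX"], maxResults=500, fields="messages(id),nextPageToken")
response = self.executeGmailAPI_withretry(request)
if response is not False:
    messages = response.get('messages', [])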

Related

Browser doesn't respond well to HTTPS proxy

I've built my own HTTPS proxy, and whenever I send some data to a browser, the browser responds with nothing, and only after a long time.
Basically, all the proxy should do is forward the message to the browser, get the response, and forward it back to the client.
The code of the proxy:
import socket
import select
import threading

serverSock = socket.socket()
serverSock.bind(('0.0.0.0', 8080))
serverSock.listen(3)

waiting_clients = {}   # client : browser
users_dict = {}
open_clients = {}
browsers_clients = {}  # browser : client

threading.Thread(target=browserCom).start()

while True:
    try:
        rlist, wlist, xlist = select.select(list(users_dict.keys()) + [serverSock], [], [], 0.3)
    except:
        pass
    else:
        for current_socket in rlist:
            if current_socket is serverSock:
                # new client
                client, address = serverSock.accept()
                print(f'{address} - connected to proxy')
                # add to dictionary
                users_dict[client] = address
                open_clients[address] = client
            else:
                # receive info
                receiving = True
                msg = bytearray()
                while receiving:
                    try:
                        data = current_socket.recv(1024)
                    except Exception as e:
                        print(e, 3)
                        if current_socket in users_dict.keys():
                            disconnect(users_dict[current_socket])
                        else:
                            current_socket.close()
                        break
                    else:
                        msg.extend(data)
                        # got the full msg
                        if len(data) < 1024:
                            receiving = False
                if len(msg) == 0:
                    if current_socket in users_dict.keys():
                        disconnect(users_dict[current_socket])
                else:
                    print("GOT FROM CLIENT", msg)
                    if current_socket in waiting_clients.keys():
                        # sending the data from client to browser
                        waiting_clients[current_socket].send(msg)
                    else:
                        msg = msg.decode()
                        msgSplit = msg.split()
                        address = msgSplit[1]
                        if address.split(':')[1].isnumeric():
                            if msg.startswith('CONNECT'):
                                browserLink, browserPort = address.split(':')
                                browserPort = int(browserPort)
                                browserIP = socket.gethostbyname(browserLink)
                                address = (browserIP, browserPort)
                                # connect to the site
                                browserSocket = socket.socket()
                                print(address)
                                browserSocket.connect((browserIP, browserPort))
                                waiting_clients[current_socket] = browserSocket
                                browsers_clients[browserSocket] = current_socket
                                msg_ret = "HTTP/1.1 200 Connection established\r\n\r\n"
                                sendMsg(users_dict[current_socket], msg_ret)
The proxy is able to make the connection after the CONNECT and notify the client, but after I send to the browser, I receive the data with a function running in the background:
def browserCom():
    while True:
        try:
            rlist, wlist, xlist = select.select(list(browsers_clients.keys()), [], [], 0.3)
        except:
            pass
        else:
            for current_browser in rlist:
                # receive data from the browser
                receiving = True
                resp_msg = bytearray()
                while receiving:
                    try:
                        data = current_browser.recv(1024)
                    except Exception as e:
                        print(e)
                        del waiting_clients[browsers_clients[current_browser]]
                        current_browser.close()
                        browsers_clients[current_browser].close()
                        del browsers_clients[current_browser]
                    else:
                        resp_msg.extend(data)
                        # got the full msg
                        if len(data) < 1024:
                            receiving = False
                print("RESPONSE FROM BROWSER", resp_msg)
                # sending the msg to the client
                sendMsg(users_dict[browsers_clients[current_browser]], resp_msg)
I need to wait a long time for the response, and most of the responses come back empty (mostly bytearray(b'')), even though I do send the response back to the client:
# sending the msg to the client
sendMsg(users_dict[browsers_clients[current_browser]], resp_msg)
using this:
def sendMsg(address, msg):
    """
    :param address: address of the client to send to
    :param msg: msg to send
    :return: sends the msg to the client socket
    """
    if address in open_clients.keys():
        sock = open_clients[address]
        if type(msg) == str:
            msg = msg.encode()
        try:
            sock.send(msg)
        except Exception as e:
            print(e, 4)
            disconnect(address)
I hope you are able to understand my code; if something is unclear, please ask in the comments and I will try to clarify as soon as possible.
This is the best I can do to keep the code minimal for this problem without removing crucial parts.
My mistake was that I didn't understand that while tunneling, both the browser and the client exchange messages. Adding another select over all the browser sockets let me check the data from all the browsers, and now it works.
The code above is updated and works.
Basically, what I added is:
def browserCom():
    while True:
        try:
            rlist, wlist, xlist = select.select(list(browsers_clients.keys()), [], [], 0.3)
        except:
            pass
        else:
            for current_browser in rlist:
                # receive data from the browser
                receiving = True
                resp_msg = bytearray()
                while receiving:
                    try:
                        data = current_browser.recv(1024)
                    except Exception as e:
                        print(e)
                        del waiting_clients[browsers_clients[current_browser]]
                        current_browser.close()
                        browsers_clients[current_browser].close()
                        del browsers_clients[current_browser]
                    else:
                        resp_msg.extend(data)
                        # got the full msg
                        if len(data) < 1024:
                            receiving = False
                # disconnecting browser
                if resp_msg == bytearray(b''):
                    del waiting_clients[browsers_clients[current_browser]]
                    current_browser.close()
                    browsers_clients[current_browser].close()
                    del browsers_clients[current_browser]
                print("RESPONSE FROM BROWSER", resp_msg)
                # sending the msg to the client
                if current_browser in browsers_clients and browsers_clients[current_browser] in users_dict:
                    sendMsg(users_dict[browsers_clients[current_browser]], resp_msg)

UDP Tracker only gives me my ip as answer to announce request

I've recently been trying to create a torrent client in Python, and I have just got the UDP announce protocol to work.
The tracker accepts my connect request just fine, but it only returns my own IP and port as the peer list when I announce to it...
I've looked at the same torrents in other torrent clients and they have multiple working peers, while my request only shows my own computer (I've tried this on many torrents; all return just my IP and port).
Here's the code for the sending function itself:
async def announce_udp(self, try_num=1):
    self.sock.settimeout(15)
    answer = {}
    inner_while = False
    while try_num < 4:
        while try_num < 4:
            try:
                print("trying to send")
                sended = self.send(1, self.announce_payload())
                print("sending the following packet: {0}".format(sended))
                print(self.url)
                inner_while = True
                break
            except Exception:
                print("problem in sending")
                try_num += 1
        if not inner_while:
            break
        try:
            answer = self.interpret(15)
            break
        except Exception:
            print("problem in receiving")
            try_num += 1
    print("announce answer is: {0}".format(answer))
    return answer
Here's the code for the payload-building function:
def announce_payload(self, downloaded=0, left=0, uploaded=0, event=0, key=get_transaction_id()):
    # note: the default for `key` is evaluated once, at function definition time
    payload = [self.torrent.get_torrent_info_hash_decoded(), get_peer_id().encode(), downloaded,
               self.torrent.get_torrent_size(), uploaded, event, 0, key, -1, 6988]
    p_tosend = None
    try:
        p_tosend = struct.pack('!20s20sqqqiIIiH', *payload)
    except Exception as e:
        print("there was an error: {0}".format(e))
    return p_tosend
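For context, the pack string lines up with the BEP 15 announce body (my annotation for reference; offsets are relative to the body, after the connection_id/action/transaction_id header that self.send presumably prepends):

import struct

# !20s20sqqqiIIiH ->
#   20s  info_hash   (20-byte SHA-1 digest)
#   20s  peer_id
#   q    downloaded
#   q    left
#   q    uploaded
#   i    event       (0 none, 1 completed, 2 started, 3 stopped)
#   I    IP address  (0 = use the sender's address)
#   I    key
#   i    num_want    (-1 = default)
#   H    port
print(struct.calcsize('!20s20sqqqiIIiH'))  # 82 bytes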
Here's the code for the interpret + process functions:
def interpret(self, timeout=10):
    self.sock.settimeout(timeout)
    print("got to interpret")
    try:
        response = self.sock.recv(10240)
        print("answer received")
    except socket.timeout:
        print("no answer, try again")
        raise TrackerResponseException("no answer", 0)
    headers = response[:8]
    payload = response[8:]
    action, trans_id = struct.unpack('!ll', headers)
    try:
        trans = self.transactions[trans_id]
    except KeyError:
        raise TrackerResponseException("InvalidTransaction: id not found", trans_id)
    try:
        trans['response'] = self.process(action, payload, trans)
    except Exception as e:
        trans['response'] = None
        print("error occurred: {0}".format(e))
    trans['completed'] = True
    del self.transactions[trans_id]
    # print(trans)
    return trans

def process_announce(self, payload, trans):
    response = {}
    info = payload[:struct.calcsize("!lll")]
    interval, leechers, seeders = struct.unpack("!lll", info)
    print(interval, leechers, seeders, "noamsssssss")
    peer_data = payload[struct.calcsize("!lll"):]
    peer_size = struct.calcsize("!lH")
    num_of_peers = int(len(peer_data) / peer_size)
    print("the number of peers is: {0} and the peer data is: {1}".format(num_of_peers, peer_data))
    print()
    peers = []
    for peer_offset in range(num_of_peers):
        off = peer_size * peer_offset
        peer = peer_data[off:off + peer_size]
        addr, port = struct.unpack("!lH", peer)
        peers.append({
            'addr': socket.inet_ntoa(struct.pack('!L', addr)),
            'port': port,
        })
    print(payload)
    return dict(interval=interval, leechers=leechers, seeders=seeders, peers=peers)
I'm sorry if any of this is irrelevant, but I want to give you all of the code in case it tells you something.
(get_peer_id() returns a random peer ID per the tracker protocol specification, and get_transaction_id() returns random.randint(0, 1 << 32 - 1).)
EDIT:
Alright, I've found the problem, and now I'm feeling pretty dumb...
It turns out that even with the UDP tracker, the info hash you send has to be SHA-1 hashed (the 20-byte digest of the bencoded info dictionary).
Hopefully this can help someone if they are stuck on the same problem :)
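For anyone following along, a minimal sketch of computing that digest (assuming a bencode codec such as the bencodepy package; the file name is a placeholder):

import hashlib
import bencodepy  # assumption: any bencode encode/decode library works here

with open("example.torrent", "rb") as f:  # placeholder path
    meta = bencodepy.decode(f.read())
info_hash = hashlib.sha1(bencodepy.encode(meta[b"info"])).digest()
print(len(info_hash))  # 20 raw bytes -- this is what goes into the announce packet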

Lambda Gives an error { "errorMessage": "Process exited before completing request" }

Trying to execute this Lambda function, I am getting the error:
{ "errorMessage": "RequestId: 6db7d67e-78e9-43e5-a325-09206e4514ac Process exited before completing request" }
I am using this script to notify AWS IAM users when their passwords and access keys expire.
from __future__ import print_function
import boto3
from botocore.exceptions import ClientError
import os
import json
import csv
from time import sleep
import datetime
import dateutil.parser
import sys

# These should be passed in via Lambda Environment Variables
try:
    BLACKHOLE_GROUPNAME = os.environ['BLACKHOLE_GROUPNAME']
    ACTION_TOPIC_ARN = os.environ['ACTION_TOPIC_ARN']
    GRACE_PERIOD = int(os.environ['GRACE_PERIOD'])
    DISABLE_USERS = os.environ['DISABLE_USERS']
    SEND_EMAIL = os.environ['SEND_EMAIL']
    FROM_ADDRESS = os.environ['FROM_ADDRESS']
    EXPLANATION_FOOTER = os.environ['EXPLANATION_FOOTER']
    EXPLANATION_HEADER = os.environ['EXPLANATION_HEADER']
except KeyError as e:
    print("Key Error: " + e.message)
    sys.exit(1)

# Define a Global String to be the report output sent to ACTION_TOPIC_ARN
ACTION_SUMMARY = ""
REPORT_SUMMARY = ""

print('Loading function')

if DISABLE_USERS == "true":
    expired_message = "\n\tYour Password is {} days post expiration. Your permissions have been revoked. "
    key_expired_message = "\n\tYour AccessKey ID {} is {} days post expiration. It has been deactivated. "
else:
    expired_message = "\n\tYour Password is {} days post expiration. You must change your password or risk losing access. "
    key_expired_message = "\n\tYour AccessKey ID {} is {} days post expiration. You must rotate this key or it will be deactivated. "

key_warn_message = "\n\tYour AccessKey ID {} is {} days from expiration. You must rotate this key or it will be deactivated. "
password_warn_message = "\n\tYour Password will expire in {} days"
email_subject = "Credential Expiration Notice From AWS Account: {}"

def lambda_handler(event, context):
    print("Received event: " + json.dumps(event, sort_keys=True))
    iam_client = boto3.client('iam')
    try:
        if event['source'] == "aws.iam":
            process_IAMEvent(event, context, iam_client)
        else:
            process_UsersCron(iam_client)
    except KeyError as e:
        # Probably called as a test event without a source. This is what we want to do here.
        process_UsersCron(iam_client)
    return

def process_UsersCron(iam_client):
    global ACTION_SUMMARY  # This is what we send to the admins
    global REPORT_SUMMARY
    max_age = get_max_password_age(iam_client)
    account_name = iam_client.list_account_aliases()['AccountAliases'][0]
    credential_report = get_credential_report(iam_client)
    # Iterate over the credential report, use the report to determine password expiration
    # Then query for access keys, and use the key creation data to determine key expiration
    for row in credential_report:
        if row['password_enabled'] != "true": continue  # Skip IAM Users without passwords, they are service accounts
        message = ""  # This is what we send to the user
        if is_user_expired(row['user']) == 0:
            # Process their password
            password_expires = days_till_expire(row['password_last_changed'], max_age)
            if password_expires <= 0:
                REPORT_SUMMARY = REPORT_SUMMARY + "\n{}'s Password expired {} days ago".format(row['user'], password_expires * -1)
                message = message + expired_message.format(password_expires * -1)
                add_user_to_blackhole(row['user'], iam_client)
            elif password_expires < GRACE_PERIOD:
                message = message + password_warn_message.format(password_expires)
                REPORT_SUMMARY = REPORT_SUMMARY + "\n{}'s Password Will expire in {} days".format(row['user'], password_expires)
        try:
            # Process their Access Keys
            response = iam_client.list_access_keys(UserName=row['user'])
            for key in response['AccessKeyMetadata']:
                if key['Status'] == "Inactive": continue
                key_expires = days_till_expire(key['CreateDate'], max_age)
                if key_expires <= 0:
                    message = message + key_expired_message.format(key['AccessKeyId'], key_expires * -1)
                    disable_users_key(key['AccessKeyId'], row['user'], iam_client)
                    REPORT_SUMMARY = REPORT_SUMMARY + "\n {}'s Key {} expired {} days ago ".format(row['user'], key['AccessKeyId'], key_expires * -1)
                elif key_expires < GRACE_PERIOD:
                    message = message + key_warn_message.format(key['AccessKeyId'], key_expires)
                    REPORT_SUMMARY = REPORT_SUMMARY + "\n {}'s Key {} will expire {} days from now ".format(row['user'], key['AccessKeyId'], key_expires)
        except ClientError as e:
            continue
        # Email user if necessary
        if message != "":
            email_user(row['user'], message, account_name)
    # All Done. Send a summary to the ACTION_TOPIC_ARN, and print one out for the Lambda Logs
    print("Action Summary:" + ACTION_SUMMARY)
    if ACTION_SUMMARY != "": send_summary()
    if REPORT_SUMMARY != "": email_user(FROM_ADDRESS, REPORT_SUMMARY, account_name)
    return

def is_user_expired(username):
    client = boto3.client('iam')
    try:
        response = client.list_groups_for_user(UserName=username)
    except ClientError as e:
        return 1
    for group in response['Groups']:
        if group['GroupName'] == BLACKHOLE_GROUPNAME:
            return 1
    return 0

def email_user(email, message, account_name):
    global ACTION_SUMMARY  # This is what we send to the admins
    if SEND_EMAIL != "true": return  # Abort if we're not supposed to send email
    if message == "": return  # Don't send an empty message
    client = boto3.client('ses')
    body = EXPLANATION_HEADER + "\n" + message + "\n\n" + EXPLANATION_FOOTER
    try:
        response = client.send_email(
            Source=FROM_ADDRESS,
            Destination={'ToAddresses': [email]},
            Message={
                'Subject': {'Data': email_subject.format(account_name)},
                'Body': {'Text': {'Data': body}}
            }
        )
        ACTION_SUMMARY = ACTION_SUMMARY + "\nEmail Sent to {}".format(email)
        return
    except ClientError as e:
        print("Failed to send message to {}: {}".format(email, e.message))
        ACTION_SUMMARY = ACTION_SUMMARY + "\nERROR: Message to {} was rejected: {}".format(email, e.message)

def days_till_expire(last_changed, max_age):
    # Ok - So last_changed can either be a string to parse or already a datetime object.
    # Handle these accordingly
    if type(last_changed) is str:
        last_changed_date = dateutil.parser.parse(last_changed).date()
    elif type(last_changed) is datetime.datetime:
        last_changed_date = last_changed.date()
    else:
        # print("last_changed", last_changed)
        # print(type(last_changed))
        return -99999
    expires = (last_changed_date + datetime.timedelta(max_age)) - datetime.date.today()
    return(expires.days)

# Request the credential report, download and parse the CSV.
def get_credential_report(iam_client):
    resp1 = iam_client.generate_credential_report()
    if resp1['State'] == 'COMPLETE':
        try:
            response = iam_client.get_credential_report()
            credential_report_csv = response['Content']
            # print(credential_report_csv)
            reader = csv.DictReader(credential_report_csv.splitlines())
            # print(reader.fieldnames)
            credential_report = []
            for row in reader:
                credential_report.append(row)
            return(credential_report)
        except ClientError as e:
            print("Unknown error getting Report: " + e.message)
    else:
        sleep(2)
        return get_credential_report(iam_client)

# Query the account's password policy for the password age. Return that number of days
def get_max_password_age(iam_client):
    try:
        response = iam_client.get_account_password_policy()
        return response['PasswordPolicy']['MaxPasswordAge']
    except ClientError as e:
        print("Unexpected error in get_max_password_age: %s" + e.message)

# if called by an IAM Event, do stuff. Not yet implemented
def process_IAMEvent(event, context, iam_client):
    api_call = event['detail']['eventName']
    if api_call == "CreateLoginProfile":
        process_CreateLoginProfile(event, context)
        return 0
    elif api_call == "EnableMFADevice":
        process_EnableMFADevice(event, context)
        return 0
    elif api_call == "DeactivateMFADevice":
        process_DeactivateMFADevice(event, context)
        return 0
    else:
        raise Exception("Invalid API Call: " + api_call)

# Add the user to the group that only allows them to reset their password
def add_user_to_blackhole(username, iam_client):
    if DISABLE_USERS != "true": return
    global ACTION_SUMMARY
    ACTION_SUMMARY = ACTION_SUMMARY + "\nAdding {} to Blackhole Group".format(username)
    response = iam_client.add_user_to_group(
        GroupName=os.environ['BLACKHOLE_GROUPNAME'],
        UserName=username
    )
    if response['ResponseMetadata']['HTTPStatusCode'] != 200:
        handle_error("Adding User to Blackhole Group", username, response['ResponseMetadata'])
    else:
        return 0

# Turn off the specified user's key by setting it to inactive.
def disable_users_key(AccessKeyId, UserName, iam_client):
    if DISABLE_USERS != "true": return
    global ACTION_SUMMARY
    ACTION_SUMMARY = ACTION_SUMMARY + "\nDisabling AccessKeyId {} for user {}".format(AccessKeyId, UserName)
    response = iam_client.update_access_key(
        UserName=UserName,
        AccessKeyId=AccessKeyId,
        Status='Inactive'
    )
    if response['ResponseMetadata']['HTTPStatusCode'] != 200:
        handle_error("Disabling Access Key", UserName, response['ResponseMetadata'])
    else:
        return 0

# Not used, but would remove the user from the blackhole group once they did change their password
def remove_user_from_blackhole(username, iam_client):
    response = iam_client.remove_user_from_group(
        GroupName=os.environ['BLACKHOLE_GROUPNAME'],
        UserName=username
    )
    if response['ResponseMetadata']['HTTPStatusCode'] != 200:
        handle_error("Removing User from Blackhole Group", username, response['ResponseMetadata'])
    else:
        return 0

def handle_error(action, username, ResponseMetadata):
    raise Exception("ERROR" + action + " User: " + username + " Details: " + str(ResponseMetadata))

# Send the Summary of actions taken to the SNS topic
def send_summary():
    global ACTION_SUMMARY
    client = boto3.client('sns')
    message = "The following Actions were taken by the Expire Users Script at {}: ".format(datetime.datetime.now()) + ACTION_SUMMARY
    response = client.publish(
        TopicArn=ACTION_TOPIC_ARN,
        Message=message,
        Subject="Expire Users Report for {}".format(datetime.date.today())
    )
Sorry if this is a repeat; I couldn't find a solution. If someone has a script to notify IAM users about their password expiry, that would be fine as well.
Thank you.
Have you checked the memory? If you run out of memory, you also get that message.
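A quick way to confirm this is to log the function's memory configuration and remaining time from the context object, and compare against the "Max Memory Used" value in the REPORT line of the CloudWatch logs (a minimal sketch):

def lambda_handler(event, context):
    # Sketch: log the configured memory limit and remaining time so an
    # out-of-memory or timeout exit is visible next to the error.
    print("Memory limit (MB):", context.memory_limit_in_mb)
    print("Remaining time (ms):", context.get_remaining_time_in_millis())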

OAuth Handshake Copy Error

I am trying to build a PoC with the REST API of Copy, but I have a problem when I try to get the ACCESS TOKEN:
Message: oauth_problem=signature_invalid&debug_sbs=GET&https%3A%2F%2Fapi.copy.com%...
@app.route('/get_access_token')
def get_access_token():
    print "Get Access Token"
    oauth_verifier = request.args['oauth_verifier']
    oauth_token = request.args['oauth_token']
    print oauth_token + " & " + oauth_verifier
    # Create your consumer with the proper key/secret.
    consumer = oauth.Consumer(key=CONSUMER_KEY, secret=CONSUMER_SECRET)
    print "Consumer: ", consumer
    client = oauth.Client(consumer)
    url = access_url + "?oauth_verifier=%s&oauth_token=%s" % (oauth_verifier, oauth_token)
    print url
    resp, content = client.request(url, "GET")
    print "Resp: ", resp
    print "Content: ", content
    return content
I would appreciate any help.
I have been able to solve my own issue. The problem was creating a new consumer (I already had one from the first step of the OAuth handshake) and not using the oauth.Token provided by the library (I had put in the oauth_verifier and the oauth_token with a workaround).
The solution:
@app.route('/get_access_token')
def get_access_token():
    print "Get Access Token"
    try:
        oauth_verifier = request.args['oauth_verifier']
        oauth_token = request.args['oauth_token']
        print oauth_token + " & " + oauth_verifier
        token = oauth.Token(oauth_token, request_token_secret)  # request_token_secret is global
        token.set_verifier(oauth_verifier)
        client = oauth.Client(consumer, token)  # consumer is global
        url = "https://api.copy.com/oauth/access"
        resp, content = client.request(url, "GET")
        print "Resp: ", resp
        print "Content: ", content
        return content
    except Exception as e:
        return str(e)

Is it possible to use OAUTH 2 with the Google Reporting API?

I am currently using OAuth 1 for auth with the Reporting API, with GData and Python. Is it possible to use OAuth 2? I can't find a reference that this is doable.
I wasn't able to find any reference for OAuth 2 and the Reporting API either, but by following the samples for the GData libraries (http://code.google.com/p/gdata-python-client/source/browse/#hg%2Fsamples%2Fapps) I was able to cobble this together:
#!/usr/bin/python

import sys
import os
import time

import gdata.gauth
import gdata.client
import httplib2
import oauth2client.file
import oauth2client.tools

REPORTING_URI = 'https://www.google.com/hosted/services/v1.0/reports/ReportingData'
REPORTING_XML_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
<rest xmlns="google:accounts:rest:protocol"
      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <type>Report</type>
    <domain>%s</domain>
    <date>%s</date>
    <page>%s</page>
    <reportType>daily</reportType>
    <reportName>%s</reportName>
</rest>'''

OAUTH2FILENAME = 'oauth_20.dat'
OAUTH2JSONFILE = 'client_secrets.json'
OAUTH2SCOPES = 'https://www.google.com/hosted/services/v1.0/reports/ReportingData'
OAUTH2USERAGENT = 'REPORTING'
CLIENTSOURCE = 'REPORTING'

MISSING_OAUTHJSON_FILE_MESSAGE = """
WARNING: Please configure OAuth 2.0

To continue you will need to populate the client_secrets.json file:
%s
with information from the APIs Console <https://code.google.com/apis/console>.
""" % os.path.join(os.path.dirname(__file__), OAUTH2JSONFILE)

### Reporting
def RunReport(http_object, domain, report=None, date=None):
    if date is None:
        now = time.time()
        report_time = time.gmtime(now)
        date = time.strftime("%Y-%m-%d", report_time)
    if report is None:
        report = 'accounts'
    report_data = RequestReport(http_object, domain=domain, report=report, date=date)
    if not report_data:
        print 'No report data'
    return report_data

def RequestReport(http_object, domain=None, report=None, date=None):
    """Retrieves a report

    Args:
        domain: string
        report: string: accounts, activity, disk_space, email_clients, summary
        date: string: YYYY-MM-DD

    Returns:
        String, the report data
    """
    report_data = ''
    uri = REPORTING_URI
    if not report or report is None:
        return report_data
    if not date or date is None:
        return report_data
    if not domain or domain is None:
        domain = self.domain
    page = 1
    while True:
        report_xml = REPORTING_XML_TEMPLATE % (domain, date, page, report)
        response = ''
        report_page = ''
        try:
            response, report_page = http_object.request(
                uri, method='POST', body=report_xml)
        except Exception, rexcept:
            print 'Exception: ', rexcept
            report_page = ''
            break
        if response.status != 200:
            print 'Error: ', response.status
            report_page = ''
            break
        if not report_page or report_page == 'End-Of-Report':
            break
        else:
            report_data += report_page
            page = page + 1
    return report_data

scopes = OAUTH2SCOPES
user_agent = OAUTH2USERAGENT
client_source = CLIENTSOURCE
str_oauth2file = OAUTH2FILENAME
str_oauthjsonfile = OAUTH2JSONFILE

domain = 'somedomain'
report_name = 'accounts'
client_id = 'string'
client_secret = 'string'
report_data = ''
oauth2_flow = ''

now = time.time()
report_time = time.gmtime(now)
report_date = time.strftime("%Y-%m-%d", report_time)

if not os.path.isfile(str_oauth2file):
    token = gdata.gauth.OAuth2Token(client_id=client_id,
        client_secret=client_secret, scope=scopes, user_agent=user_agent)
    uri = token.generate_authorize_url()
    print 'Please visit this URL to authorize the application:'
    print uri
    # Get the verification code from the standard input.
    code = raw_input('What is the verification code? ').strip()
    token.get_access_token(code)
    oauth2_flow = oauth2client.client.flow_from_clientsecrets(str_oauthjsonfile,
        scope=scopes, message=MISSING_OAUTHJSON_FILE_MESSAGE)

storage = oauth2client.file.Storage(str_oauth2file)
oauth2_credentials = storage.get()
if oauth2_credentials is None or oauth2_credentials.invalid:
    if not oauth2_flow:
        oauth2_flow = oauth2client.client.flow_from_clientsecrets(str_oauthjsonfile,
            scope=scopes, message=MISSING_OAUTHJSON_FILE_MESSAGE)
    print '\nYou must authorize access to the request APIS.\n'
    # Save the credentials in storage to be used in subsequent runs.
    oauth2_credentials = oauth2client.tools.run(oauth2_flow, storage)

http_oauth2_object = httplib2.Http()
http_oauth2_object = oauth2_credentials.authorize(http_oauth2_object)

report_data = RunReport(
    http_oauth2_object, domain, report=report_name, date=report_date)

if report_data:
    print report_data

sys.exit(0)
