I am trying to build a PoC with the Copy REST API, but I run into a problem when I try to get the access token:
Message: oauth_problem=signature_invalid&debug_sbs=GET&https%3A%2F%2Fapi.copy.com%...
@app.route('/get_access_token')
def get_access_token():
    print "Get Access Token"
    oauth_verifier = request.args['oauth_verifier']
    oauth_token = request.args['oauth_token']
    print oauth_token + " & " + oauth_verifier
    # Create your consumer with the proper key/secret.
    consumer = oauth.Consumer(key=CONSUMER_KEY, secret=CONSUMER_SECRET)
    print "Consumer: ", consumer
    client = oauth.Client(consumer)
    url = access_url + "?oauth_verifier=%s&oauth_token=%s" % (oauth_verifier, oauth_token)
    print url
    resp, content = client.request(url, "GET")
    print "Resp: ", resp
    print "Content: ", content
    return content
I would appreciate any help.
I have been able to solve my own issue. The problem was that I created a new consumer (instead of reusing the one from the first step of the OAuth handshake) and that I was not using the oauth.Token provided by the library (I had appended the oauth_verifier and oauth_token to the URL as a workaround).
The solution:
@app.route('/get_access_token')
def get_access_token():
    print "Get Access Token"
    try:
        oauth_verifier = request.args['oauth_verifier']
        oauth_token = request.args['oauth_token']
        print oauth_token + " & " + oauth_verifier
        token = oauth.Token(oauth_token, request_token_secret)  # request_token_secret is global
        token.set_verifier(oauth_verifier)
        client = oauth.Client(consumer, token)  # consumer is global
        url = "https://api.copy.com/oauth/access"
        resp, content = client.request(url, "GET")
        print "Resp: ", resp
        print "Content: ", content
        return content
    except Exception as e:
        return str(e)
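For context, the consumer and request_token_secret globals used above come from the first leg of the handshake. A minimal sketch of that step (the request-token and authorize URLs below are assumptions on my part; the original post does not show them):

import urlparse
import oauth2 as oauth
from flask import redirect

# Globals reused later by /get_access_token.
consumer = oauth.Consumer(key=CONSUMER_KEY, secret=CONSUMER_SECRET)
request_token_secret = None

@app.route('/start_handshake')
def start_handshake():
    global request_token_secret
    client = oauth.Client(consumer)
    # Assumed endpoint names; check the Copy API documentation.
    resp, content = client.request("https://api.copy.com/oauth/request", "GET")
    request_token = dict(urlparse.parse_qsl(content))
    request_token_secret = request_token['oauth_token_secret']
    # The user authorizes the app; Copy then redirects back with oauth_token and oauth_verifier.
    return redirect("https://www.copy.com/applications/authorize?oauth_token=%s"
                    % request_token['oauth_token'])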
I'm trying to retrieve the information within the non_public_metrics field in the Twitter API (i.e., "impression_count", "url_link_clicks", "user_profile_clicks"). I was able to access the public_metrics field using only the Bearer Token, but when I include non_public_metrics in my query params I get a Field Authorization Error. Here is my code:
import requests
import collections
import os
from dotenv import load_dotenv

load_dotenv()

def auth():
    return os.getenv('TWITTER_TOKEN')

def create_headers(bearer_token):
    headers = {"Authorization": "Bearer {}".format(bearer_token)}
    return headers

def create_url(keyword, start_date, end_date, max_results=10):
    ttid = 1184334528837574656
    search_url = f"https://api.twitter.com/2/users/{ttid}/tweets"  # Change to the endpoint you want to collect data from
    # change params based on the endpoint you are using
    query_params = {'start_time': start_date,
                    'end_time': end_date,
                    'max_results': max_results,
                    'tweet.fields': 'public_metrics,created_at,non_public_metrics',  # remove non_public_metrics and the code will work
                    'next_token': {}}
    return (search_url, query_params)

def connect_to_endpoint(url, headers, params, next_token=None):
    params['next_token'] = next_token  # params object received from create_url function
    response = requests.request("GET", url, headers=headers, params=params)
    print("Endpoint Response Code: " + str(response.status_code))
    if response.status_code != 200:
        raise Exception(response.status_code, response.text)
    return response.json()

def flatten(d, parent_key='', sep='_'):
    items = []
    for k, v in d.items():
        new_key = parent_key + sep + k if parent_key else k
        if isinstance(v, collections.MutableMapping):
            items.extend(flatten(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))
    return dict(items)

# Inputs for the request
bearer_token = auth()
headers = create_headers(bearer_token)
keyword = "xbox lang:en"
start_time = "2021-12-01T00:00:00.000Z"
end_time = "2021-12-22T00:00:00.000Z"
max_results = 100
url = create_url(keyword, start_time, end_time, max_results)
json_response = connect_to_endpoint(url[0], headers, url[1])
print(json_response['data'])  # if non_public_metrics is included, this throws an error
Then I read in the Twitter docs that I need to use OAuth 1.0 authorization in order to access the non_public_metrics field. I tried one of the sample codes available on the twitter-dev GitHub page that uses OAuth 1.0 authentication. Here is the snippet I used:
from requests_oauthlib import OAuth1Session
import os
import json
from dotenv import load_dotenv

load_dotenv()

consumer_key = os.getenv("CONSUMER_KEY")
consumer_secret = os.getenv("CONSUMER_SECRET")

# I actually used an ID associated with my account, not this one
params = {"ids": "1184334528837574656", "tweet.fields": "public_metrics,created_at,non_public_metrics"}

request_token_url = "https://api.twitter.com/oauth/request_token"
oauth = OAuth1Session(consumer_key, client_secret=consumer_secret)

try:
    fetch_response = oauth.fetch_request_token(request_token_url)
except ValueError:
    print(
        "There may have been an issue with the consumer_key or consumer_secret you entered."
    )

resource_owner_key = fetch_response.get("oauth_token")
resource_owner_secret = fetch_response.get("oauth_token_secret")
print("Got OAuth token: %s" % resource_owner_key)

# Get authorization
base_authorization_url = "https://api.twitter.com/oauth/authorize"
authorization_url = oauth.authorization_url(base_authorization_url)
print("Please go here and authorize: %s" % authorization_url)
verifier = input("Paste the PIN here: ")

# Get the access token
access_token_url = "https://api.twitter.com/oauth/access_token"
oauth = OAuth1Session(
    consumer_key,
    client_secret=consumer_secret,
    resource_owner_key=resource_owner_key,
    resource_owner_secret=resource_owner_secret,
    verifier=verifier,
)
oauth_tokens = oauth.fetch_access_token(access_token_url)

access_token = oauth_tokens["oauth_token"]
access_token_secret = oauth_tokens["oauth_token_secret"]

# Make the request
oauth = OAuth1Session(
    consumer_key,
    client_secret=consumer_secret,
    resource_owner_key=access_token,
    resource_owner_secret=access_token_secret,
)
response = oauth.get(
    "https://api.twitter.com/2/tweets", params=params
)

if response.status_code != 200:
    raise Exception(
        "Request returned an error: {} {}".format(response.status_code, response.text)
    )

print("Response code: {}".format(response.status_code))
json_response = response.json()
print(json.dumps(json_response, indent=4, sort_keys=True))
This snippet, however, leads me to a similar error: "Sorry, you are not authorized to access 'non_public_metrics.impression_count' on the Tweet with ids". Besides, this snippet has the huge inconvenience of asking me to click a link and generate a PIN every time I need to request information for a particular tweet.
How can I properly request information on non_public_metrics field for my tweets?
One can retrieve the information within the non_public_metrics field using the URL https://api.twitter.com/2/tweets/[YOUR_TWEET_ID]?tweet.fields=non_public_metrics in Postman. To do the same in Python, just use the following snippet:
import os
from requests_oauthlib import OAuth1
import requests
from dotenv import load_dotenv

load_dotenv()

YOUR_TWEET_ID = ''
url = f'https://api.twitter.com/2/tweets/{YOUR_TWEET_ID}?tweet.fields=public_metrics,non_public_metrics'

CONSUMER_KEY = os.getenv('CONSUMER_KEY')
CONSUMER_SECRET = os.getenv('CONSUMER_SECRET')
ACCESS_TOKEN = os.getenv('ACCESS_TOKEN')
ACCESS_SECRET = os.getenv('ACCESS_SECRET')

headeroauth = OAuth1(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_SECRET, signature_type='auth_header')

r = requests.get(url, auth=headeroauth)
print(r.json())
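The same user-context credentials also work for several tweets at once via the bulk lookup endpoint (a small variation on the snippet above, not part of the original answer; the ID below is a placeholder):

# Bulk lookup: comma-separated IDs, same OAuth1 user-context auth as above.
ids = '1184334528837574656'  # placeholder; use your own tweet IDs, comma-separated
bulk_url = 'https://api.twitter.com/2/tweets'
params = {'ids': ids, 'tweet.fields': 'public_metrics,non_public_metrics'}

r = requests.get(bulk_url, params=params, auth=headeroauth)
if r.status_code != 200:
    raise Exception(r.status_code, r.text)
print(r.json())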
The code below is the result of this question, How to sign an OKEx API request?, and some of its answers:
import hmac
import base64
import requests
import datetime
import json

from config import KEY, SECRET, PASS, ROOT_URL

def get_time():
    now = datetime.datetime.utcnow()
    t = now.isoformat("T", "milliseconds")
    return t + "Z"

def signature(timestamp, request_type, endpoint, body, secret):
    if body != '':
        body = json.dumps(body)
    message = str(timestamp) + str.upper(request_type) + endpoint + body
    print(message)
    mac = hmac.new(bytes(secret, encoding='utf-8'), bytes(message, encoding='utf-8'), digestmod='sha256')
    d = mac.digest()
    return base64.b64encode(d)

def get_header(request_type, endpoint, body):
    time = get_time()
    header = dict()
    header['CONTENT-TYPE'] = 'application/json'
    header['OK-ACCESS-KEY'] = KEY
    header['OK-ACCESS-SIGN'] = signature(time, request_type, endpoint, body, SECRET)
    header['OK-ACCESS-TIMESTAMP'] = str(time)
    header['OK-ACCESS-PASSPHRASE'] = PASS
    return header

def get(endpoint, body=''):
    url = ROOT_URL + endpoint
    header = get_header('GET', endpoint, body)
    return requests.get(url, headers=header)

def post(endpoint, body=''):
    url = ROOT_URL + endpoint
    header = get_header('POST', endpoint, body)
    return requests.post(url, headers=header)
where KEY, SECRET, and PASS are the API key, secret key, and passphrase respectively, and ROOT_URL is 'https://www.okex.com'.
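For reference, the string that signature() hashes (the prehash) is just the concatenation of timestamp, upper-cased method, request path, and body. A small illustration with made-up values (the timestamp and secret below are assumptions, not real credentials):

import base64
import hmac

# The prehash for a GET on the balance endpoint: timestamp + method + path + empty body.
timestamp = "2021-12-22T10:15:30.123Z"  # assumed example value
prehash = timestamp + "GET" + "/api/v5/account/balance" + ""
mac = hmac.new(b"my-secret", prehash.encode("utf-8"), digestmod="sha256")
print(base64.b64encode(mac.digest()))   # this value is sent as OK-ACCESS-SIGN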
The Problem
GET requests work absolutely fine, so when I run the following, there are no issues:
ENDPOINT = '/api/v5/account/balance'
BODY = ''
response = get(ENDPOINT)
response.json()
However, when I try to place an order via a POST request, like so:
ENDPOINT = '/api/v5/trade/order'
BODY = {"instId":"BTC-USDT",
"tdMode":"cash",
"side":"buy",
"ordType":"market",
"sz":"1"}
response = post(ENDPOINT, body=BODY)
response.json()
I get the following output, i.e. it won't accept the signature:
{'msg': 'Invalid Sign', 'code': '50113'}
Related Questions
In this one, Can't figure out how to send a signed POST request to OKEx, an answer was provided, but it does not work for me as I was already using the suggested URL. More or less the same question was asked here: Unable to send a post requests OKEX Invalid Signature, but it got no activity, likely due to the format, so I thought I would repost and elaborate.
OKEX Docs
The docs simply specify that "The API endpoints of Trade require authentication" (https://www.okex.com/docs-v5/en/?python#rest-api-authentication-signature), but they make no reference to any difference between the two methods. Beyond that, I am including all required parameters in the body of the POST request as far as I can see.
I would appreciate any input on this.
Many thanks!
I ran into the same POST problem and figured it out. I used the new domain name okex.com. Here is my code.
def set_userinfo(self):
    position_path = "/api/v5/account/set-position-mode"
    try:
        # Sign exactly the same body that is sent below.
        self.get_header("POST", position_path, {"posMode": "long_short_mode"})
        resp = requests.post(url=self.base_url+position_path, headers=self.headers, json={"posMode": "long_short_mode"}).json()
    except Exception as e:
        log.error("OK set_userinfo error={} type={}".format(f'{e}', f'{type(e)}'))

def get_header(self, request_type, endpoint, body=''):
    timestamp = self.get_time()
    self.headers["OK-ACCESS-TIMESTAMP"] = timestamp
    self.headers["OK-ACCESS-SIGN"] = self.signature(timestamp, request_type, endpoint, body)

def signature(self, timestamp, request_type, endpoint, body):
    if body != '':
        body = json.dumps(body)
    message = str(timestamp) + str.upper(request_type) + endpoint + body
    mac = hmac.new(bytes(self.secret_key, encoding='utf-8'), bytes(message, encoding='utf-8'), digestmod='sha256').digest()
    return base64.b64encode(mac)
I have fixed the same problem. The 'body' used in signature() and in get_header() should both be the same JSON. So you should add the following code:
if str(body) == '{}' or str(body) == 'None':
    body = ''
else:
    body = json.dumps(body)
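Putting this together with the helpers from the question (get_time, KEY, SECRET, PASS, ROOT_URL), a sketch of a consistent post() might look like the following; the key point is that the exact string that is signed is also the string sent as the request body:

def post(endpoint, body=None):
    # Serialize the body once; sign and send that exact same string.
    payload = json.dumps(body) if body else ''
    timestamp = get_time()
    message = timestamp + 'POST' + endpoint + payload
    sign = base64.b64encode(
        hmac.new(bytes(SECRET, 'utf-8'), bytes(message, 'utf-8'), digestmod='sha256').digest())
    headers = {
        'CONTENT-TYPE': 'application/json',
        'OK-ACCESS-KEY': KEY,
        'OK-ACCESS-SIGN': sign,
        'OK-ACCESS-TIMESTAMP': timestamp,
        'OK-ACCESS-PASSPHRASE': PASS,
    }
    return requests.post(ROOT_URL + endpoint, headers=headers, data=payload)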
I ran into the same problem and solved it using the code snippet below. The idea is from https://stackoverflow.com/a/68115787/20497127, but I modified it a little by adding POST functionality.
APIKEY = "" # input key
APISECRET = "" #input secret
PASS = "" #input passphrase
BASE_URL = 'https://www.okx.com'
def send_signed_request(http_method, url_path, payload={}):
def get_time():
return dt.datetime.utcnow().isoformat()[:-3]+'Z'
def signature(timestamp, method, request_path, body, secret_key):
if str(body) == '{}' or str(body) == 'None':
body = ''
message = str(timestamp) + str.upper(method) + request_path + str(body)
mac = hmac.new(bytes(secret_key, encoding='utf8'), bytes(message, encoding='utf-8'), digestmod='sha256')
d = mac.digest()
return base64.b64encode(d)
# set request header
def get_header(request='GET', endpoint='', body:dict=dict()):
cur_time = get_time()
header = dict()
header['CONTENT-TYPE'] = 'application/json'
header['OK-ACCESS-KEY'] = APIKEY
header['OK-ACCESS-SIGN'] = signature(cur_time, request, endpoint , body, APISECRET)
header['OK-ACCESS-TIMESTAMP'] = str(cur_time)
header['OK-ACCESS-PASSPHRASE'] = PASS
# demo trading: need to set x-simulated-trading=1, live trading is 0
header['x-simulated-trading'] = '1'
return header
url = BASE_URL + url_path
header = get_header(http_method, url_path, payload)
print(url)
print(header)
if http_method == 'GET':
response = requests.get(url, headers=header)
elif http_method == 'POST':
response = requests.post(url, headers=header, data=payload)
return response.json()
# this will run get requests
res = send_signed_request("GET", "/api/v5/account/balance", payload={})
# this will run post requests
data = {
"instId": "BTC-USDT",
"tdMode": "cross",
"side": "sell",
"ccy":"USDT",
"ordType": "limit",
"px": "100000",
"sz": "0.01"
}
res = send_signed_request("POST", "/api/v5/trade/order", payload=json.dumps(data))
My program, which uses the Python Gmail API, has been encountering a lot of socket.timeout errors. I very frequently receive the following:
socket.timeout: The read operation timed out
This error appears to be random and generally occurs with any Gmail API function. I have tried modifying the socket timeout parameter; however, changing it does not seem to remove the issue. I have varied it from 1 second to 10 seconds to 600 seconds.
socket.setdefaulttimeout(10)
Or for an httplib2.Http object:
def build_http(self):
    """Builds httplib2.Http object

    Returns:
        A httplib2.Http object, which is used to make http requests, and which has timeout set by default.
        To override the default timeout, call

            socket.setdefaulttimeout(timeout_in_sec)

        before interacting with this method.
    """
    try:
        return httplib2.Http(timeout=10)
    except:
        self.GLogger.error("An error was encountered in build_http")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
The following code is used to initialize the Gmail API service.
def gmailAPIInitialize(self):
    try:
        self.waitForInternet()
        self.GLogger.info("Initializing the Gmail API Service")
        creds = None
        # The file token.pickle stores the user's access and refresh tokens, and is
        # created automatically when the authorization flow completes for the first
        # time.
        if self.reprocess is True:
            token_pickle_file = 'Gmail_token_2.pickle'
            credentials_file = 'Gmail_credentials_2.json'
        else:
            token_pickle_file = 'Gmail_token_1.pickle'
            credentials_file = 'Gmail_credentials_1.json'
        if os.path.exists(token_pickle_file):
            with open(token_pickle_file, 'rb') as token:
                creds = pickle.load(token)
        # If there are no (valid) credentials available, let the user log in.
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    credentials_file, SCOPES)
                creds = flow.run_local_server(port=0)
            # Save the credentials for the next run
            with open(token_pickle_file, 'wb') as token:
                pickle.dump(creds, token)
        service = build('gmail', 'v1', credentials=creds, cache_discovery=False)
        self.gmailAPIService = service
        self.GLogger.info("Successfully initialized the Gmail API Service")
        return True
    except:
        self.GLogger.error("An error was encountered while attempting to initialize the Gmail API")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
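If the intent is for a larger timeout to actually apply to these service calls, one option (a sketch, assuming the google-auth-httplib2 package, which is not shown in the original post) is to build the service around an httplib2.Http object that carries an explicit timeout:

import httplib2
from google_auth_httplib2 import AuthorizedHttp
from googleapiclient.discovery import build

# Hypothetical variant of the build step above: wrap the credentials around an
# Http object with a generous timeout so every API call uses it.
authed_http = AuthorizedHttp(creds, http=httplib2.Http(timeout=60))
service = build('gmail', 'v1', http=authed_http, cache_discovery=False)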
Again, pretty much any function will trigger this issue. But here are some examples:
Thread List:
def gmailAPIMessageLabelSearchThreads(self, labelList, userID="me", allPages=False, reverseOrder=False):
    try:
        self.GLogger.info("Attempting to search email threads with labelList (" + str(labelList) + ") and userID (" + str(userID) + ")")
        service = self.gmailAPIService
        if service is None:
            logging.error('Gmail Service not initialized')
            return False
        response = service.users().threads().list(userId=userID, labelIds=labelList, maxResults=500, fields="threads(id),nextPageToken").execute()
        messages = []
        if 'messages' in response:
            messages.extend(response['messages'])
        if (allPages is True):
            while 'nextPageToken' in response:
                page_token = response['nextPageToken']
                response = service.users().messages().list(userId=userID, labelIds=labelList, pageToken=page_token, maxResults=500, fields="threads(id),nextPageToken").execute()
                if 'messages' in response:
                    messages.extend(response['messages'])
        if reverseOrder is True:
            messages.reverse()
        self.GLogger.info("Successfully searched emails with labelList (" + str(labelList) + ") and userID (" + str(userID) + "). Number of matching emails (" + str(len(messages)) + ")")
        return messages
    except:
        self.GLogger.error("An error was encountered while searching for messages with google API and label list")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
Send Email:
def gmailAPISendEmail(self, message, userID="me"):
    try:
        service = self.gmailAPIService
        self.GLogger.info("Attempting to send email message")
        response = (service.users().messages().send(userId=userID, body=message).execute())
        responseID = str(response['id'])
        self.GLogger.info("Successfully sent email message with ID (" + responseID + ")")
        return responseID
    except:
        self.GLogger.error("Failed to send email message")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
Message List:
def gmailAPIMessageLabelSearch(self, labelList, userID="me", allPages=False, reverseOrder=False, numPages=None):
    try:
        self.GLogger.info("Attempting to search emails with labelList (" + str(labelList) + ") and userID (" + str(userID) + ")")
        service = self.gmailAPIService
        if service is None:
            logging.error('Gmail Service not initialized')
            return False
        #response = service.users().messages().list(userId=userID, labelIds=labelList, fields='messages(id)').execute()
        response = service.users().messages().list(userId=userID, labelIds=labelList, maxResults=500, fields="messages(id),nextPageToken").execute()
        messages = []
        if 'messages' in response:
            messages.extend(response['messages'])
        numPages_Processed = 0
        if (allPages is True):
            while 'nextPageToken' in response:
                page_token = response['nextPageToken']
                #response = service.users().messages().list(userId=userID, labelIds=labelList, pageToken=page_token, fields='messages(id)', maxResults=500).execute()
                response = service.users().messages().list(userId=userID, labelIds=labelList, pageToken=page_token, maxResults=500, fields="messages(id),nextPageToken").execute()
                if 'messages' in response:
                    messages.extend(response['messages'])
                numPages_Processed = numPages_Processed + 1
                if (numPages is not None) and numPages_Processed >= numPages:
                    break
        if reverseOrder is True:
            messages.reverse()
        self.GLogger.info("Successfully searched emails with labelList (" + str(labelList) + ") and userID (" + str(userID) + "). Number of matching emails (" + str(len(messages)) + ")")
        listToReturn = list()
        for message in messages:
            listToReturn.append(message['id'])
        return listToReturn
    except:
        self.GLogger.error("An error was encountered while searching for messages with google API and label list")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
Attachment Download:
def gmailAPIDownloadAttachments(self, messageID, message=None, userID="me"):
    try:
        service = self.gmailAPIService
        self.GLogger.info("Attempting to download attachments from messageID (" + str(messageID) + ")")
        if message is None:
            message = self.gmailAPIGetFullMessage(messageID, userID=userID)
            if message is False:
                self.GLogger.error("Failed to extract message (" + str(messageID) + ") for downloading attachments")
                return False
        attachmentList = list()
        payload = message['payload']
        if 'parts' in payload:
            parts = payload['parts']
            for part in parts:
                if part['filename']:
                    if 'data' in part['body']:
                        data = part['body']['data']
                    else:
                        att_id = part['body']['attachmentId']
                        att = service.users().messages().attachments().get(userId=userID, messageId=messageID, id=att_id).execute()
                        data = att['data']
                    file_data = base64.urlsafe_b64decode(data.encode('UTF-8'))
                    filename = part['filename']
                    extSearch = filename.find('.')
                    if extSearch == -1:
                        ext = ""
                        partFileName = filename  # no extension found, keep the full name
                    else:
                        ext = filename[extSearch+1:]
                        partFileName = filename[0:extSearch]
                    theAttachment = Attachment(filename, partFileName, ext, file_data)
                    attachmentList.append(theAttachment)
        self.GLogger.info("Successfully downloaded attachments from messageID (" + str(messageID) + ")")
        return (attachmentList)
    except:
        self.GLogger.error("Encountered an error while attempting to download email attachments from messageID (" + str(messageID) + ")")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
Batch requests, where searchResultParts is a list of lists, each containing 100 message IDs:
for searchResultPart in searchResultParts:
    batch = service.new_batch_http_request(callback=self.theEmailCallback)
    for msgID in searchResultPart:  # Loop through each messageID
        request1 = service.users().messages().get(userId=userID, id=msgID)
        batch.add(request=request1, request_id=msgID)
    batch.execute(http=self.http_toUse)
There are several Python Gmail API functions that I use, and these are just a subset. However, all of them tend to produce the socket.timeout error at some point. Currently, my firewall is off (ufw is inactive).
I believe I am using these functions as intended and believe this issue lies with Google. Are there any Google team members here who could look into this? What can I do to resolve this issue? These socket.timeout errors occur so often that they are causing issues with my application.
My internet connection is fiber Gigabit for both upload and download.
Edit: I am now calling all of my Python Gmail API requests through the function below. It waits 50 ms (plus 50 ms more per retry) and retries until the request succeeds or 10 retries have been made. The socket.timeout error is still prevalent, but from my observations most socket.timeout errors succeed after 2 or 3 retries.
def executeGmailAPI_withretry(self, request):
    try:
        response_valid = False
        num_retries = 0
        while num_retries < 10:
            try:
                response = request.execute()
                response_valid = True
                break
            except socket.timeout:
                num_retries = num_retries + 1
                time.sleep(0.05*num_retries)
            except:
                self.GLogger.error("An error was encountered in executeGmailAPI_withretry")
                tb = traceback.format_exc()
                self.GLogger.exception(tb)
                num_retries = num_retries + 1
                time.sleep(0.05*num_retries)
        if response_valid is False:
            return False
        else:
            return response
    except:
        self.GLogger.error("An error was encountered in executeGmailAPI_withretry")
        tb = traceback.format_exc()
        self.GLogger.exception(tb)
        return False
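As a possible complement (not from the original post), the Google API client can also retry transient failures on its own if you pass num_retries to execute(); a minimal sketch, assuming the standard googleapiclient execute() signature:

# Alternative to the manual loop: let the client library retry transient
# errors itself (it sleeps with a backoff between attempts).
response = request.execute(num_retries=5)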
So I am trying to request a definition from the Oxford Dictionaries API, and when the user types !define (user's word) the bot should return the definition. I am having a few issues; see the code below.
@client.command()
async def define(word_id):
    app_id = '************'
    app_key = '***********'
    language = 'en'
    url = 'https://od-api.oxforddictionaries.com:443/api/v1/entries/' + \
        language + '/' + word_id.lower()
    r = requests.get(url, headers={'app_id': app_id, 'app_key': app_key})
    await client.say("The definition is " + ("text \n" + r.text))
The error I'm getting is as follows:
discord.errors.HTTPException: BAD REQUEST (status code: 400) + discord.ext.commands.errors.CommandInvokeError: Command raised an exception: HTTPException: BAD REQUEST (status code: 400)
Here is what they expect me to use:
import requests
import json

# TODO: replace with your own app_id and app_key
app_id = '****'
app_key = '******'
language = 'en'
word_id = 'Ace'

url = 'https://od-api.oxforddictionaries.com:443/api/v1/entries/' + \
    language + '/' + word_id.lower()

r = requests.get(url, headers={'app_id': app_id, 'app_key': app_key})

print("code {}\n".format(r.status_code))
print("text \n" + r.text)
print("json \n" + json.dumps(r.json()))
The Twitter v1 API is now defunct so I've been trying to use the Search and Streaming APIs to collate hashtag information. The Search API is rate limited, so if there are a lot of entries on a hashtag you will probably miss some. Streaming seemed like the way to go.
Using the oauth2 library, here is my (anonymized) code:
import oauth2 as oauth
import json
consumer_key = "<consumer key from twitter developer site>"
consumer_secret = "<consumer secret>"
oauth_token = "<access token>"
oauth_token_secret = "<access token secret>"
consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
access_token = oauth.Token(key=oauth_token, secret=oauth_token_secret)
client = oauth.Client(consumer, access_token)
terms = json.dumps({'track' : 'twitter'})
stream_endpoint = "https://stream.twitter.com/1.1/statuses/filter.json"
response, data = client.request(stream_endpoint,"POST", body=terms, headers={'Content-Type':'application/json'})
The issue I run into is that this request always returns the following message:
>>>'No filter parameters found. Expect at least one parameter: follow track locations\r\n'
I think your error is because you are passing JSON data in
terms = json.dumps({'track' : 'twitter'})
You should write it like this instead:
terms = 'track=twitter'
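In other words (a hedged sketch extending the answer above, since the streaming endpoint expects form-encoded parameters rather than a JSON document), the request would become:

terms = 'track=twitter'
stream_endpoint = "https://stream.twitter.com/1.1/statuses/filter.json"
# Same oauth2 client as in the question, but with a form-encoded body.
response, data = client.request(
    stream_endpoint, "POST", body=terms,
    headers={'Content-Type': 'application/x-www-form-urlencoded'})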
USER = request.params.get('username', '00000')
LIMIT = request.params.get('limit', '50')
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'

consumer_key = '424245wfdsfa4'
consumer_secret = 'afar234252523adsasd'

if consumer_key is None or consumer_secret is None:
    print 'you need consumer_key & consumer_secret key'
    sys.exit(1)

signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
oauth_client = oauth.Client(oauth_consumer)

response, content = oauth_client.request(REQUEST_TOKEN_URL, 'POST')
if response['status'] == '200':
    request_token = dict(parse_qsl(content))
else:
    print 'Invalid response from Twitter requesting token.........: %s' % response['status']

endpoint = 'https://api.twitter.com/1.1/statuses/user_timeline.json?screen_name=' + USER + '&count=' + LIMIT
response, content = oauth_client.request(endpoint, 'GET')
url = response['content-location']
f = urllib2.urlopen(url)
response = f.read()
return simplejson.loads(response)