Using Python and OAuth2 with Twitter streaming API - python

The Twitter v1 API is now defunct so I've been trying to use the Search and Streaming APIs to collate hashtag information. The Search API is rate limited, so if there are a lot of entries on a hashtag you will probably miss some. Streaming seemed like the way to go.
Using OAuth2, here is my (anonymized) code:
import oauth2 as oauth
import json
consumer_key = "<consumer key from twitter developer site>"
consumer_secret = "<consumer secret>"
oauth_token = "<access token>"
oauth_token_secret = "<access token secret>"
consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
access_token = oauth.Token(key=oauth_token, secret=oauth_token_secret)
client = oauth.Client(consumer, access_token)
terms = json.dumps({'track' : 'twitter'})
stream_endpoint = "https://stream.twitter.com/1.1/statuses/filter.json"
response, data = client.request(stream_endpoint,"POST", body=terms, headers={'Content-Type':'application/json'})
The issue I run into is this always returns the following message:
>>>'No filter parameters found. Expect at least one parameter: follow track locations\r\n'

I think the error is because you are sending JSON data in
terms = json.dumps({'track' : 'twitter'})
The filter endpoint expects form-encoded parameters, so write it like this instead:
terms = 'track=twitter'
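Putting it together, a minimal sketch of the corrected request (same consumer, token, and client setup as in the question; note the form-encoded Content-Type rather than application/json):
terms = 'track=twitter'
response, data = client.request(stream_endpoint, "POST", body=terms,
    headers={'Content-Type': 'application/x-www-form-urlencoded'})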

# Excerpt from a web request handler (Python 2, using the oauth2 library);
# it also relies on sys, urllib2, simplejson and urlparse.parse_qsl.
USER = request.params.get('username', '00000')
LIMIT = request.params.get('limit', '50')
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
consumer_key = '424245wfdsfa4'
consumer_secret = 'afar234252523adsasd'
if consumer_key is None or consumer_secret is None:
    print 'you need consumer_key & consumer_secret key'
    sys.exit(1)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
oauth_client = oauth.Client(oauth_consumer)
response, content = oauth_client.request(REQUEST_TOKEN_URL, 'POST')
if response['status'] == '200':
    request_token = dict(parse_qsl(content))
else:
    print 'Invalid response from Twitter requesting token: %s' % response['status']
endpoint = 'https://api.twitter.com/1.1/statuses/user_timeline.json?screen_name=' + USER + '&count=' + LIMIT
response, content = oauth_client.request(endpoint, 'GET')
url = response['content-location']
f = urllib2.urlopen(url)
response = f.read()
return simplejson.loads(response)

Related

Python api for copy trading on bitget

I’ve quite recently found this feature on Bitget which enables users to essentially copy other ranked traders. The feature comes with corresponding API documentation, but after going through it I'm more confused than ever. Firstly, I'm trying to obtain the historical trading data of specific traders, which is available on the “orders” tab of the website. I reckon this is possible with the following GET request from the documentation: “GET /api/mix/v1/trace/waitProfitDateList”.
Based on that HTTP request I have produced the Python code below. The response is always 403. Help a fellow novice out.
import requests
import hmac
import base64
import hashlib
import json
import time

def sign(message, secret_key):
    mac = hmac.new(bytes(secret_key, encoding='utf8'), bytes(message, encoding='utf-8'), digestmod='sha256')
    d = mac.digest()
    return base64.b64encode(d).decode('utf-8')

def pre_hash(timestamp, method, request_path, query_string, body):
    return str(timestamp) + str.upper(method) + request_path + query_string + body

if __name__ == '__main__':
    params = {
        "pageSize": 10,
        "pageNo": 1
    }
    rest_url = "https://api.bitget.com"
    secret_key = ""
    api_key = ""
    passphrase = ""
    timestamp = int(time.time_ns() / 1000000)
    query_string = '&'.join([f'{k}={v}' for k, v in params.items()])
    message = pre_hash(timestamp, 'GET', '/api/mix/v1/trace/waitProfitDateList', "?" + query_string, "")
    sign = sign(message, secret_key)
    headers = {
        "ACCESS-KEY": api_key,
        "ACCESS-SIGN": sign,
        "ACCESS-TIMESTAMP": str(timestamp),
        "ACCESS-PASSPHRASE": passphrase,
        "Content-Type": "application/json",
        "locale": "en-US"
    }
    response = requests.get(rest_url, headers=headers, params=params)
    if response.status_code == 200:
        result = response.json()
        print(result)
    else:
        print(response.status_code)
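As a side note on the snippet above: the pre-hash string signs the path /api/mix/v1/trace/waitProfitDateList, but the GET is then sent to the bare host URL. A minimal sketch of the main block with the request sent to the same path that was signed (it reuses the sign and pre_hash helpers above, the credentials still need to be filled in, and this alone may or may not clear the 403):
if __name__ == '__main__':
    params = {"pageSize": 10, "pageNo": 1}
    rest_url = "https://api.bitget.com"
    request_path = "/api/mix/v1/trace/waitProfitDateList"
    secret_key = ""
    api_key = ""
    passphrase = ""
    timestamp = int(time.time_ns() / 1000000)
    query_string = '&'.join(f'{k}={v}' for k, v in params.items())
    # sign exactly what will be requested: timestamp + method + path + query string
    signature = sign(pre_hash(timestamp, 'GET', request_path, '?' + query_string, ''), secret_key)
    headers = {
        "ACCESS-KEY": api_key,
        "ACCESS-SIGN": signature,
        "ACCESS-TIMESTAMP": str(timestamp),
        "ACCESS-PASSPHRASE": passphrase,
        "Content-Type": "application/json",
        "locale": "en-US"
    }
    # request the signed path, not just the host root
    response = requests.get(rest_url + request_path, headers=headers, params=params)
    print(response.status_code, response.text)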

How to access non_public_metrics field in twitter API?

I'm trying to retrieve the information within the non_public_metrics field in the Twitter API (i.e., "impression_count", "url_link_clicks", "user_profile_clicks"). I was able to access the public_metrics field using only the Bearer Token, but when I include non_public_metrics in my query params I get a Field Authorization Error. Here is my code:
import requests
import collections
import os
from dotenv import load_dotenv

load_dotenv()

def auth():
    return os.getenv('TWITTER_TOKEN')

def create_headers(bearer_token):
    headers = {"Authorization": "Bearer {}".format(bearer_token)}
    return headers

def create_url(keyword, start_date, end_date, max_results=10):
    ttid = 1184334528837574656
    search_url = f"https://api.twitter.com/2/users/{ttid}/tweets"  # Change to the endpoint you want to collect data from
    # change params based on the endpoint you are using
    query_params = {'start_time': start_date,
                    'end_time': end_date,
                    'max_results': max_results,
                    'tweet.fields': 'public_metrics,created_at,non_public_metric',  # remove non_public_metric and the code will work
                    'next_token': {}}
    return (search_url, query_params)

def connect_to_endpoint(url, headers, params, next_token=None):
    params['next_token'] = next_token  # params object received from create_url function
    response = requests.request("GET", url, headers=headers, params=params)
    print("Endpoint Response Code: " + str(response.status_code))
    if response.status_code != 200:
        raise Exception(response.status_code, response.text)
    return response.json()

def flatten(d, parent_key='', sep='_'):
    items = []
    for k, v in d.items():
        new_key = parent_key + sep + k if parent_key else k
        if isinstance(v, collections.MutableMapping):
            items.extend(flatten(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))
    return dict(items)

# Inputs for the request
bearer_token = auth()
headers = create_headers(bearer_token)
keyword = "xbox lang:en"
start_time = "2021-12-01T00:00:00.000Z"
end_time = "2021-12-22T00:00:00.000Z"
max_results = 100
url = create_url(keyword, start_time, end_time, max_results)
json_response = connect_to_endpoint(url[0], headers, url[1])
print(json_response['data'])  # if non_public_metrics is included, this throws an error
Then I read in the Twitter docs that I need to use OAuth 1.0 authorization in order to access the non_public_metrics field. I tried one of the sample codes available on the twitter-dev GitHub page that uses OAuth 1.0 authentication. Here is the snippet I used:
from requests_oauthlib import OAuth1Session
import os
import json
from dotenv import load_dotenv

load_dotenv()

consumer_key = os.getenv("CONSUMER_KEY")
consumer_secret = os.getenv("CONSUMER_SECRET")
# I actually used an ID associated with my account, not this one
params = {"ids": "1184334528837574656", "tweet.fields": "public_metrics,created_at,non_public_metrics"}
request_token_url = "https://api.twitter.com/oauth/request_token"
oauth = OAuth1Session(consumer_key, client_secret=consumer_secret)
try:
    fetch_response = oauth.fetch_request_token(request_token_url)
except ValueError:
    print(
        "There may have been an issue with the consumer_key or consumer_secret you entered."
    )
resource_owner_key = fetch_response.get("oauth_token")
resource_owner_secret = fetch_response.get("oauth_token_secret")
print("Got OAuth token: %s" % resource_owner_key)
# Get authorization
base_authorization_url = "https://api.twitter.com/oauth/authorize"
authorization_url = oauth.authorization_url(base_authorization_url)
print("Please go here and authorize: %s" % authorization_url)
verifier = input("Paste the PIN here: ")
# Get the access token
access_token_url = "https://api.twitter.com/oauth/access_token"
oauth = OAuth1Session(
    consumer_key,
    client_secret=consumer_secret,
    resource_owner_key=resource_owner_key,
    resource_owner_secret=resource_owner_secret,
    verifier=verifier,
)
oauth_tokens = oauth.fetch_access_token(access_token_url)
access_token = oauth_tokens["oauth_token"]
access_token_secret = oauth_tokens["oauth_token_secret"]
# Make the request
oauth = OAuth1Session(
    consumer_key,
    client_secret=consumer_secret,
    resource_owner_key=access_token,
    resource_owner_secret=access_token_secret,
)
response = oauth.get(
    "https://api.twitter.com/2/tweets", params=params
)
if response.status_code != 200:
    raise Exception(
        "Request returned an error: {} {}".format(response.status_code, response.text)
    )
print("Response code: {}".format(response.status_code))
json_response = response.json()
print(json.dumps(json_response, indent=4, sort_keys=True))
This snippet, however, leads me to a similar error: "Sorry, you are not authorized to access 'non_public_metrics.impression_count' on the Tweet with ids". Besides, this snippet has the huge inconvenience of asking me to click a link and generate a PIN every time I need to request information for a particular tweet.
How can I properly request information on the non_public_metrics field for my tweets?
One can retrieve the information within the non_public_metrics field using the URL https://api.twitter.com/2/tweets/[YOUR_TWEET_ID]?tweet.fields=non_public_metrics in Postman. To do the same in Python, just use the following snippet:
import os
from requests_oauthlib import OAuth1
import requests
from dotenv import load_dotenv

load_dotenv()

YOUR_TWEET_ID = ''
url = f'https://api.twitter.com/2/tweets/{YOUR_TWEET_ID}?tweet.fields=public_metrics,non_public_metrics'
CONSUMER_KEY = os.getenv('CONSUMER_KEY')
CONSUMER_SECRET = os.getenv('CONSUMER_SECRET')
ACCESS_TOKEN = os.getenv('ACCESS_TOKEN')
ACCESS_SECRET = os.getenv('ACCESS_SECRET')
headeroauth = OAuth1(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_SECRET, signature_type='auth_header')
r = requests.get(url, auth=headeroauth)
print(r.json())
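If you only need the impression count, the value sits under data.non_public_metrics in the JSON response, so a small follow-on sketch (continuing from r above) would be:
body = r.json()
metrics = body.get('data', {}).get('non_public_metrics', {})
print(metrics.get('impression_count'))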

Every 1 minute, generate a report based only on the data tweeted in last 5 minutes

My code gives continuous data, but I want to filter the data to the last five minutes. Additionally, I want to report it every minute. What do I need to do for that?
try:
    import json
except ImportError:
    import simplejson as json
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream

ACCESS_TOKEN = 'secret'
ACCESS_SECRET = 'secret'
CONSUMER_KEY = 'secret'
CONSUMER_SECRET = 'secret'

oauth = OAuth(ACCESS_TOKEN, ACCESS_SECRET, CONSUMER_KEY, CONSUMER_SECRET)
twitter_stream = TwitterStream(auth=oauth)
iterator = twitter_stream.statuses.filter(track="car", language="en")

hashtags = []  # collected hashtag texts
for tweet in iterator:
    try:
        if 'text' in tweet:
            print tweet['user']['name']
            print tweet['user']['statuses_count']
            # print '\n'
            for hashtag in tweet['entities']['hashtags']:
                hashtags.append(hashtag['text'])
            print hashtags
    except:
        continue
Thanks in advance.
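One possible approach (a rough sketch with illustrative names, reusing the iterator from the snippet above): stamp each tweet as it arrives, keep a rolling five-minute window, and emit a report roughly once a minute.
import time
from collections import deque

WINDOW_SECONDS = 5 * 60   # only consider tweets from the last 5 minutes
REPORT_EVERY = 60         # emit a report every minute

window = deque()          # (arrival_time, tweet) pairs
next_report = time.time() + REPORT_EVERY

for tweet in iterator:
    if 'text' not in tweet:
        continue
    now = time.time()
    window.append((now, tweet))
    # drop anything older than five minutes
    while window and window[0][0] < now - WINDOW_SECONDS:
        window.popleft()
    if now >= next_report:
        recent = [t for _, t in window]
        hashtags = [h['text']
                    for t in recent
                    for h in t.get('entities', {}).get('hashtags', [])]
        print('last 5 min: %d tweets, hashtags: %s' % (len(recent), hashtags))
        next_report = now + REPORT_EVERY
Note that a report is only emitted when a new tweet arrives; if you need reports even during quiet periods, move the reporting into a separate thread or timer.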

Reddit to Twitter bot having issues with key-dictionary calls

So I'm following a tutorial for a certain Reddit-to-Twitter bot that's coded in Python using PRAW, and I am stuck hitting errors.
Running this code in the command console gives me the error on line 74.
import praw
import json
import requests
import tweepy
import time

access_token = 'secret'
access_token_secret = ' secret'
consumer_key = 'secret'
consumer_secret = 'secret'

def strip_title(title):
    if len(title) < 94:
        return title
    else:
        return title[:93] + "..."

def tweet_creator(subreddit_info):
    post_dict = {}
    post_ids = []
    print "[bot] Getting posts from Reddit"
    for submission in subreddit_info.get_hot(limit=20):
        post_dict[strip_title(submission.title)] = submission.url
        post_ids.append(submission.id)
    print "[bot] Generating short link using goo.gl"
    mini_post_dict = {}
    for post in post_dict:
        post_title = post
        post_link = post_dict[post]
        short_link = shorten(post_link)
        mini_post_dict[post_title] = short_link
    return mini_post_dict, post_ids

def setup_connection_reddit(subreddit):
    print "[bot] setting up connection with Reddit"
    r = praw.Reddit('yasoob_python reddit twitter bot '
                    'monitoring %s' % (subreddit))
    subreddit = r.get_subreddit(subreddit)
    return subreddit

def shorten(url):
    headers = {'content-type': 'application/json'}
    payload = {"longUrl": url}
    url = "https://www.googleapis.com/urlshortener/v1/url"
    r = requests.post(url, data=json.dumps(payload), headers=headers)
    link = json.loads(r.text)
    return link

def duplicate_check(id):
    found = 0
    with open('posted_posts.txt', 'r') as file:
        for line in file:
            if id in line:
                found = 1
    return found

def add_id_to_file(id):
    with open('posted_posts.txt', 'a') as file:
        file.write(str(id) + "\n")

def main():
    subreddit = setup_connection_reddit('showerthoughts')
    post_dict, post_ids = tweet_creator(subreddit)
    tweeter(post_dict, post_ids)

def tweeter(post_dict, post_ids):
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)
    for post, post_id in zip(post_dict, post_ids):
        found = duplicate_check(post_id)
        if found == 0:
            print "[bot] Posting this link on twitter"
            print post + " " + post_dict[post] + " #Python #reddit #bot"
            api.update_status(post + " " + post_dict[post] + " #Python #reddit #bot")
            add_id_to_file(post_id)
            time.sleep(30)
        else:
            print "[bot] Already posted"

if __name__ == '__main__':
    main()
Error:
print post+" "+post_dict[post]+"#python #reddit #bot"
TypeError: coercing to Unicode: need string or buffer, dict found
My understanding of the code and the error is that it needs a string to be sent but is somehow getting the entire key-dictionary set. I thought that by passing the post key into post_dict it would get the particular post for the bot to use, but instead it's fetching a dictionary!
Lines 74 and 75 both call post_dict[post], and neither is getting the dictionary's value when looking up the post key.
Try printing post and post_dict before you do that concatenation in the tweeter function's for loop. That should show you what those structs look like and make the solution evident.
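In this case the printout will most likely show that post_dict[post] is the whole JSON response from the goo.gl shortener, because shorten() returns json.loads(r.text) instead of just the short URL. A sketch of that fix, assuming the shortener's response carries the short link under the 'id' key (check your actual response to confirm):
def shorten(url):
    headers = {'content-type': 'application/json'}
    payload = {"longUrl": url}
    api_url = "https://www.googleapis.com/urlshortener/v1/url"
    r = requests.post(api_url, data=json.dumps(payload), headers=headers)
    link = json.loads(r.text)
    # return only the short URL string, not the whole response dict
    return link['id']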

OAuth Handshake Copy Error

I am trying to build a PoC with the Copy REST API, but I have a problem when I try to get the ACCESS TOKEN:
Message: oauth_problem=signature_invalid&debug_sbs=GET&https%3A%2F%2Fapi.copy.com%...
@app.route('/get_access_token')
def get_access_token():
    print "Get Access Token"
    oauth_verifier = request.args['oauth_verifier']
    oauth_token = request.args['oauth_token']
    print oauth_token + " & " + oauth_verifier
    # Create your consumer with the proper key/secret.
    consumer = oauth.Consumer(key=CONSUMER_KEY, secret=CONSUMER_SECRET)
    print "Consumer: ", consumer
    client = oauth.Client(consumer)
    url = access_url + "?oauth_verifier=%s&oauth_token=%s" % (oauth_verifier, oauth_token)
    print url
    resp, content = client.request(url, "GET")
    print "Resp: ", resp
    print "Content: ", content
    return content
I would appreciate any help.
I have been able to solve my own issue. The problem was creating a new consumer (I already had one from the first step of the OAuth handshake) and not using the oauth.Token provided by the library (I had been passing the oauth_verifier and the oauth_token with a workaround instead).
The solution:
@app.route('/get_access_token')
def get_access_token():
    print "Get Access Token"
    try:
        oauth_verifier = request.args['oauth_verifier']
        oauth_token = request.args['oauth_token']
        print oauth_token + " & " + oauth_verifier
        token = oauth.Token(oauth_token, request_token_secret)  # request_token_secret is global
        token.set_verifier(oauth_verifier)
        client = oauth.Client(consumer, token)  # consumer is global
        url = "https://api.copy.com/oauth/access"
        resp, content = client.request(url, "GET")
        print "Resp: ", resp
        print "Content: ", content
        return content
    except Exception as e:
        return str(e)
