import time
from TwitterAPI import TwitterAPI
import requests
from requests_oauthlib import OAuth1
from urlparse import parse_qs
# application's key and key secret
ck = ''
cs = ''
# obtain request token
oauth = OAuth1(ck, cs)
r = requests.post(url='https://api.twitter.com/oauth/request_token', auth=oauth)
credentials = parse_qs(r.content)
request_key = credentials.get('oauth_token')[0]
request_secret = credentials.get('oauth_token_secret')[0]
#obtain authorization from twitter user
print('Visit this link to authorize the TweetBot:\n https://api.twitter.com/oauth/authorize?oauth_token=%s' % request_key)
verifier = raw_input('Enter your verification code: ')
# obtain access token
oauth = OAuth1(ck, cs, request_key, request_secret, verifier=verifier)
r = requests.get(url='https://api.twitter.com/oauth/access_token', auth=oauth)
credentials = parse_qs(r.content)
tk = credentials.get('oauth_token')[0]
ts = credentials.get('oauth_token_secret')[0]
# access TwitterAPI with the obtained access
api = TwitterAPI(ck, cs, tk, ts)
f = open('tweetbot.txt', 'rU')
for line in f:
    r = api.request('statuses/update', {'status': line})
    print line,
    print r.status_code
    time.sleep(600)
I have this code, but I want it to post from multiple Twitter accounts. I guess I'd need to loop over the input and store the credentials in different variables? Please give me the best solution; I'm not that advanced with Python.
Alright @user3392493, what you need is a while loop with several if statements inside and a counter variable, so the account variables are set based on the counter's value:
enoughaccounts = False
counter = 0
while not enoughaccounts:
    counter += 1
    if counter == 1:
        # Do Everything
        account1 = .....
    if counter == 2:
        # Do Everything
        account2 = .....
    if counter == 3:
        # Do Everything
        account3 = .....
    if counter == 4:
        # Do Everything
        account4 = .....
    moreaccounts = input('Add another account? (type yes or no)')
    if moreaccounts == 'no' or moreaccounts == 'No' or moreaccounts == 'NO':
        enoughaccounts = True
This should do what you want it to, if you need anything else just ask :)
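If you would rather not keep a numbered variable for every account, a minimal Python 3 sketch of the "loop the input" idea from the question could look like this. It assumes you already have each account's consumer key/secret and access token/secret (for example from the OAuth steps above), and it reuses the statuses/update call from the posted code:

from TwitterAPI import TwitterAPI

# Collect credentials until the user stops, keeping the clients in a list
# instead of account1, account2, ...
accounts = []
while True:
    ck = input('Consumer key (leave blank to stop): ').strip()
    if not ck:
        break
    cs = input('Consumer secret: ').strip()
    tk = input('Access token: ').strip()
    ts = input('Access token secret: ').strip()
    accounts.append(TwitterAPI(ck, cs, tk, ts))

# Post every line of the file from each account.
with open('tweetbot.txt') as f:
    for line in f:
        for api in accounts:
            r = api.request('statuses/update', {'status': line})
            print(line.strip(), r.status_code)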
I am making a simple dice bot using Tweepy and the Twitter API v2.
When my dice bot is mentioned with 'Y/N', it replies to that mention with either Yes or No.
It works well with public accounts, but it does not load mentions from private (protected) accounts.
What should I do?
import tweepy
import random
import time


def main():
    consumer_key = ""
    consumer_secret = ""
    access_token = ""
    access_token_secret = ""
    bearer_token = ""
    client = tweepy.Client(consumer_key=consumer_key, consumer_secret=consumer_secret,
                           access_token=access_token, access_token_secret=access_token_secret,
                           bearer_token=bearer_token)

    id = ''
    last_mention = 0
    yesno = ['Y', 'N']

    while True:
        mentions = client.get_users_mentions(id=id, since_id=last_mention).data
        print(mentions)
        if mentions:
            for mention in mentions:
                tweet_id = mention.id
                tweet_txt = mention.text
                answer = 0
                answer_txt = ''
                if 'Y/N' in tweet_txt:
                    text = yesno[random.randrange(0, 2)]
                    client.create_tweet(text=text, in_reply_to_tweet_id=tweet_id)
                    last_mention = tweet_id
                    continue
            time.sleep(5)
        else:
            time.sleep(5)


if __name__ == "__main__":
    main()
I read the user fields section of the Tweepy documentation, but I couldn't find anything about using the 'protected' field.
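In case it helps, the protected flag is only returned when you explicitly request it. Below is a minimal sketch of asking for it on the authors of your mentions, reusing the same client, id and last_mention variables as the code above; whether a protected account's mentions are visible to your bot at all still depends on whether your account is allowed to see that user's tweets:

# Request the author of each mention plus that author's "protected" field.
response = client.get_users_mentions(
    id=id,
    since_id=last_mention,
    expansions=["author_id"],
    user_fields=["protected"],
)
mentions = response.data
authors = {u.id: u for u in (response.includes or {}).get("users", [])}

if mentions:
    for mention in mentions:
        author = authors.get(mention.author_id)
        if author is not None and author.protected:
            print(f"Mention {mention.id} comes from a protected account")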
I currently have a script pulling data from Instagram that looks like the code block posted below. As long as you plug in your Instagram credentials under user_name and password, it should be fully reproducible.
It takes the account listed in player_df, pulls a list of all of its followers on Instagram, and then pulls the bio information for each of those followers. But when I run it, I get the following error:
ClientConnectionError: timeout The read operation timed out
You can find the entire error log here; I didn't post it in the original question because it would exceed the character limit.
As an attempt to fix this, I added the sleep(300) calls to reduce the load between API calls, but that doesn't seem to do the trick. What would be the best way to get around this so it doesn't time out while running?
from ftplib import error_proto
from hashlib import new
from multiprocessing.spawn import import_main_path
from time import sleep
from instagram_private_api import Client, ClientCompatPatch
from operator import itemgetter
import pandas as pd
import json
import requests
from collections import Counter
import datetime
import os.path
user_name = "XXXXX"
password = "XXXXX"

# Log in once and reuse the authenticated client for every request below.
api = Client(user_name, password)

players = [['hannahkshepherd', '201683404']]
player_df = pd.DataFrame(players, columns=['username', 'userId'])
def pull_followers(username_instagram, userid_instagram):
    followers = []
    combinacao = []

    results = api.user_followers(userid_instagram, rank_token=api.generate_uuid())
    followers.extend(results.get('users', []))

    next_max_id = results.get('next_max_id')
    while next_max_id:
        results = api.user_followers(userid_instagram, rank_token=api.generate_uuid(), max_id=next_max_id)
        followers.extend(results.get('users', []))
        next_max_id = results.get('next_max_id')

    userid = [followers[i]['pk'] for i in range(0, len(followers))]
    full_names = [followers[i]['full_name'] for i in range(0, len(followers))]
    usernames = [followers[i]['username'] for i in range(0, len(followers))]
    profile_pic_url = [followers[i]['profile_pic_url'] for i in range(0, len(followers))]
    followers_text = ['follower' for i in range(0, len(followers))]
    following_username = [str(username_instagram) for i in range(0, len(followers))]
    following_userid = [str(userid_instagram) for i in range(0, len(followers))]

    combinacao.extend([list(i) for i in zip(userid, full_names,
                                            usernames, profile_pic_url, followers_text,
                                            following_username, following_userid)])
    combinacao = sorted(combinacao, key=itemgetter(2), reverse=False)
    return combinacao
all_followers = []
for i in range(len(player_df)):
    all_followers += pull_followers(player_df['username'][i], player_df["userId"][i])


def get_bios(followers):
    bios = []
    for follower in followers:
        follower_id = follower[0]
        bios += [[follower_id, api.user_info(follower_id)['user']['biography']]]
    return bios


#sleep(300)
bios = get_bios(all_followers)
#sleep(300)


def print_bios():
    s = ''
    for row in bios:
        s += '\n' + 'user_id: ' + str(row[0]) + ', bio: ' + str(row[1])
    print(s)
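One thing that sometimes helps with these read timeouts is retrying each individual call with a growing pause, instead of one long sleep around the whole batch. A minimal sketch, assuming a generic wrapper is acceptable (the attempts and delay values here are arbitrary):

import time


def call_with_retries(fn, *args, attempts=3, delay=30, **kwargs):
    # Call fn, retrying a few times with an increasing pause between attempts.
    # Narrow the except clause to the library's connection/timeout errors if preferred.
    for attempt in range(1, attempts + 1):
        try:
            return fn(*args, **kwargs)
        except Exception as exc:
            if attempt == attempts:
                raise
            print('Attempt %d failed (%s); retrying in %d seconds' % (attempt, exc, delay * attempt))
            time.sleep(delay * attempt)


# For example, inside get_bios the lookup could become:
#     bio = call_with_retries(api.user_info, follower_id)['user']['biography']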
I have made a simple chat system with python-requests. There are two different files: one is the sender and the other is the receiver. The main concept of these two files is:
1. The sender file contains a while loop which repeatedly takes a message as input. After a message is entered, it sends the message to a website.
2. The receiver file also contains a while loop which fetches new messages from the website every 5 seconds.
Now I want to run these two tasks in the same window with Tkinter. How do I do that? Thanks in advance.
The Sender.py code is here:
import configme as con
import requests
import datetime
from cryptography.fernet import Fernet

nam = con.my_name
cookies_dict = con.cookie
key = con.crypto_key
url = con.base_url + '/config.php'


def makeID():
    return datetime.datetime.now().timestamp()


# encryption stuff
fernet = Fernet(key)

# member joining message
if len(nam) != 0:
    requests.get(url + f"?iD={makeID()}&name=<<<>>>&msg={nam} join the room.", cookies=cookies_dict)

with requests.Session() as r:
    while True:
        msg = input("Enter your Message: ")
        if msg == ".exit":
            # r.get(url+f"?iD={makeID()}&name=<<<>>>&msg={nam} has left the room.", cookies=cookies_dict)
            break
        else:
            encMessage = fernet.encrypt(msg.encode())
            messenger = {'iD': makeID(), 'name': nam, 'msg': encMessage}
            if msg != "":
                r.get(url, params=messenger, cookies=cookies_dict)
The Receiver.py code is here:
import configme as con
import requests
import json
from cryptography.fernet import Fernet
from time import sleep
from datetime import datetime
from pytz import timezone
import pytz

cookies_dict = con.cookie
ozone = con.my_timezone
key = con.crypto_key
time_format = con.date_time_format
url = con.base_url + '/log.json'
t = con.receive_time

# encryption stuff
fernet = Fernet(key)
timezone = timezone(ozone)


def setTime(t):
    stamptime = int(float(t))
    GMT0 = pytz.utc.localize(datetime.utcfromtimestamp(stamptime))
    return GMT0.astimezone(timezone).strftime(time_format)


j = 0
while True:
    r = requests.get(url, cookies=cookies_dict).text
    message = json.loads(r)
    message_sz = len(message)
    if message_sz == 0:
        print("Looks like there are no messages")
        break
    for msg in message[j:]:
        local_time = setTime(msg['id'])
        if msg['nam'] == '<<<>>>':
            print(f"{local_time} :: {msg['nam']} :: {msg['msg']}")
        else:
            decMessage = fernet.decrypt(bytes(msg['msg'], "utf-8")).decode()
            print(f"{local_time} :: {msg['nam']} :: {decMessage}")
    j = message_sz
    sleep(t)
I would not suggest this check-and-poll-the-website approach, but you could run the two while loops in separate threads so they work at the same time, and you could update Tk whenever you want using tk.update().
You could then read the data that the threaded loops set into shared variables and use it in your single Tk window; a sketch is shown below.
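A minimal sketch of that idea: a background thread stands in for the Receiver.py loop and hands results to the GUI through a queue, while the Tk window polls that queue with root.after instead of a blocking while loop. Here send_message and fetch_messages are hypothetical stand-ins for the request code in Sender.py and Receiver.py:

import threading
import time
import queue
import tkinter as tk

incoming = queue.Queue()  # receiver thread -> GUI


def fetch_messages():
    # Stand-in for the Receiver.py loop: poll the site every 5 seconds.
    while True:
        # msgs = requests.get(url, cookies=cookies_dict).json()  # real call
        msgs = ["(new messages would appear here)"]              # placeholder
        for m in msgs:
            incoming.put(m)
        time.sleep(5)


def send_message(text):
    # Stand-in for the Sender.py request.
    # r.get(url, params=messenger, cookies=cookies_dict)  # real call
    print("sent:", text)


root = tk.Tk()
log = tk.Listbox(root, width=60)
log.pack()
entry = tk.Entry(root, width=60)
entry.pack()


def on_send(event=None):
    send_message(entry.get())
    entry.delete(0, tk.END)


entry.bind("<Return>", on_send)


def poll_queue():
    # Runs in the GUI thread; drains whatever the receiver thread has queued.
    while not incoming.empty():
        log.insert(tk.END, incoming.get())
    root.after(500, poll_queue)


threading.Thread(target=fetch_messages, daemon=True).start()
poll_queue()
root.mainloop()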
Use multithreading, or else load the data separately.
Below is my attempt at creating a username availability checker with proxies. So far it works as intended;
the only thing is that it's slow. I tried to implement threads, but it made no difference, as I'm not sure whether I'm doing it right or not.
I used the concurrent.futures and threading libraries.
Is there a better way to code this kind of program, or are there any other suggestions?
Thanks in advance
import requests
import json
import ctypes
import colorama
from colorama import Fore
from datetime import datetime
import os
os.system("cls")
now = datetime.now()
current_time = now.strftime("%H:%M:%S")
colorama.init()
url = "https://link"
def grab_proxies():
    proxylist = []
    prx = open('proxy.txt', 'r')
    prx = prx.readlines()
    for proxy in prx:
        proxy = proxy.rstrip("\n")
        proxylist.append(proxy)
    return proxylist
prlist = grab_proxies()
def grab_usernames():
    userlist = []
    users = open('userlist.txt', 'r')
    users = users.readlines()
    for user in users:
        user = user.rstrip("\n")
        userlist.append(user)
    return userlist
ulist = grab_usernames()
found = 0
pc = 0
uc = 0
for i in range(0, len(prlist)):
    ctypes.windll.kernel32.SetConsoleTitleW("[# Checker] | Counter: %s - Found: %s - Current Proxy: %s - Started at: %s" % (i, found, prlist[pc], current_time))
    try:
        req = requests.post(url, headers=headers,
                            data={"requested_username": ulist[uc], "xsrf_token": "F0kpyvjJgeBtsOk5Gl6Jvg"},
                            proxies={'http': prlist[pc], 'https': prlist[pc]}, timeout=2)
        response = req.json()
        #print(response,req.status_code)
        #print(response)
        #print(type(response))
        if response['reference']['status_code'] == 'TAKEN':
            #rd = response['errors']['username'][0]['code']
            print(f'{Fore.LIGHTBLACK_EX}[{Fore.LIGHTRED_EX}Taken{Fore.LIGHTBLACK_EX}]{Fore.LIGHTCYAN_EX} {ulist[uc]}')
            #print(ulist[uc]+" Taken")
            uc += 1
        elif response['reference']['status_code'] == 'OK':
            print(f'{Fore.LIGHTBLACK_EX}[{Fore.LIGHTGREEN_EX}Available{Fore.LIGHTBLACK_EX}]{Fore.LIGHTCYAN_EX} {ulist[uc]}')
            #print(ulist[uc]+" Available")
            f = open("found.txt", "a")
            f.write(ulist[uc] + "\n")
            f.close()
            found += 1
            uc += 1
        elif response['reference']['status_code'] == 'INVALID_BEGIN':
            print(f'{Fore.LIGHTBLACK_EX}[{Fore.LIGHTRED_EX}Invalid Username{Fore.LIGHTBLACK_EX}]{Fore.LIGHTCYAN_EX} {ulist[uc]}')
            uc += 1
        elif response['reference']['status_code'] == 'DELETED':
            print(f'{Fore.LIGHTBLACK_EX}[{Fore.LIGHTRED_EX}Deleted{Fore.LIGHTBLACK_EX}]{Fore.LIGHTCYAN_EX} {ulist[uc]}')
            uc += 1
        else:
            print(response)
    except:
        #print(prlist[pc]+ " Going to next proxy")
        pc += 1
        pass
        #break
x = input("Finished!.. press enter to exit")
You could use https://github.com/encode/requests-async to do your requests in an async way.
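For the concurrent.futures attempt mentioned in the question, a minimal sketch with a thread pool might look like this; check_username is a hypothetical helper wrapping the requests.post call from the script, and url, headers, prlist and ulist are assumed to come from the original code:

import concurrent.futures
import requests


def check_username(username, proxy):
    # Hypothetical wrapper around the POST request from the question.
    try:
        req = requests.post(
            url,
            headers=headers,
            data={"requested_username": username, "xsrf_token": "F0kpyvjJgeBtsOk5Gl6Jvg"},
            proxies={'http': proxy, 'https': proxy},
            timeout=2,
        )
        return username, req.json()['reference']['status_code']
    except Exception:
        return username, 'PROXY_ERROR'


# Pair each username with a proxy (cycling through the proxy list) and run
# the checks on a pool of worker threads instead of one at a time.
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
    futures = [pool.submit(check_username, user, prlist[i % len(prlist)])
               for i, user in enumerate(ulist)]
    for future in concurrent.futures.as_completed(futures):
        username, status = future.result()
        print(username, status)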
I am currently using OAuth 1 for authentication with the Reporting API via GData and Python. Is it possible to use OAuth 2? I can't find any reference saying this is doable.
I wasn't able to find any reference for OAuth 2 and the Reporting API either, but by following the samples for the GData libraries (http://code.google.com/p/gdata-python-client/source/browse/#hg%2Fsamples%2Fapps) I was able to cobble this together:
#!/usr/bin/python
import sys
import os
import time
import gdata.gauth
import gdata.client
import httplib2
import oauth2client.client
import oauth2client.file
import oauth2client.tools
REPORTING_URI = 'https://www.google.com/hosted/services/v1.0/reports/ReportingData'
REPORTING_XML_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
<rest xmlns="google:accounts:rest:protocol"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<type>Report</type>
<domain>%s</domain>
<date>%s</date>
<page>%s</page>
<reportType>daily</reportType>
<reportName>%s</reportName>
</rest>'''
OAUTH2FILENAME = 'oauth_20.dat'
OAUTH2JSONFILE = 'client_secrets.json'
OAUTH2SCOPES = 'https://www.google.com/hosted/services/v1.0/reports/ReportingData'
OAUTH2USERAGENT = 'REPORTING'
CLIENTSOURCE = 'REPORTING'
MISSING_OAUTHJSON_FILE_MESSAGE = """
WARNING: Please configure OAuth 2.0
To continue you will need to populate the client_secrets.json file:
%s
with information from the APIs Console <https://code.google.com/apis/console>.
""" % os.path.join(os.path.dirname(__file__), OAUTH2JSONFILE)
### Reporting
def RunReport(http_object, domain, report=None, date=None):
    if date is None:
        now = time.time()
        report_time = time.gmtime(now)
        date = time.strftime("%Y-%m-%d", report_time)
    if report is None:
        report = 'accounts'
    report_data = RequestReport(http_object, domain=domain, report=report, date=date)
    if not report_data:
        print 'No report data'
    return report_data
def RequestReport(http_object, domain=None, report=None, date=None):
    """Retrieves a report

    Args:
      domain: string
      report: string: accounts, activity, disk_space, email_clients, summary
      date: string: YYYY-MM-DD

    Returns:
      String, the report data
    """
    report_data = ''
    uri = REPORTING_URI
    if not report or report is None:
        return report_data
    if not date or date is None:
        return report_data
    if not domain or domain is None:
        # no fallback domain is available here, so bail out
        return report_data
    page = 1
    while True:
        report_xml = REPORTING_XML_TEMPLATE % (domain, date, page, report)
        response = ''
        report_page = ''
        try:
            response, report_page = http_object.request(
                uri, method='POST', body=report_xml)
        except Exception, rexcept:
            print 'Exception: ', rexcept
            report_page = ''
            break
        if response.status != 200:
            print 'Error: ', response.status
            report_page = ''
            break
        if not report_page or report_page == 'End-Of-Report':
            break
        else:
            report_data += report_page
            page = page + 1
    return report_data
scopes = OAUTH2SCOPES
user_agent = OAUTH2USERAGENT
client_source = CLIENTSOURCE
str_oauth2file = OAUTH2FILENAME
str_oauthjsonfile = OAUTH2JSONFILE
domain = 'somedomain'
report_name = 'accounts'
client_id = 'string'
client_secret = 'string'
report_data = ''
oauth2_flow = ''
now = time.time()
report_time = time.gmtime(now)
report_date = time.strftime("%Y-%m-%d",report_time)
if not os.path.isfile(str_oauth2file):
    token = gdata.gauth.OAuth2Token(client_id=client_id,
        client_secret=client_secret, scope=scopes, user_agent=user_agent)
    uri = token.generate_authorize_url()
    print 'Please visit this URL to authorize the application:'
    print uri
    # Get the verification code from the standard input.
    code = raw_input('What is the verification code? ').strip()
    token.get_access_token(code)
    oauth2_flow = oauth2client.client.flow_from_clientsecrets(str_oauthjsonfile,
        scope=scopes, message=MISSING_OAUTHJSON_FILE_MESSAGE)

storage = oauth2client.file.Storage(str_oauth2file)
oauth2_credentials = storage.get()
if oauth2_credentials is None or oauth2_credentials.invalid:
    if not oauth2_flow:
        oauth2_flow = oauth2client.client.flow_from_clientsecrets(str_oauthjsonfile,
            scope=scopes, message=MISSING_OAUTHJSON_FILE_MESSAGE)
    print '\nYou must authorize access to the request APIS.\n'
    # Save the credentials in storage to be used in subsequent runs.
    oauth2_credentials = oauth2client.tools.run(oauth2_flow, storage)

http_oauth2_object = httplib2.Http()
http_oauth2_object = oauth2_credentials.authorize(http_oauth2_object)

report_data = RunReport(
    http_oauth2_object, domain, report=report_name, date=report_date)

if report_data:
    print report_data
sys.exit(0)