I was trying to improve my friend's Python 'Twitch account checker' (it reads a list of usernames from a text file and checks whether each one is available or taken on Twitch.tv). My goal was for it to write the available usernames to a text file in the same location as the original list. I searched Stack Overflow and found a post explaining how to output a list (I collect the available usernames in a separate list) to a text file.
When running the script, it works fine up to the part where it's supposed to save the available usernames. Then, I get the following error:
Traceback (most recent call last):
File "multithreadtwitchchecker.py", line 44, in <module>
output_available_usernames('availableusernames.txt')
File "multithreadtwitchchecker.py", line 37, in output_available_usernames
AVAILABLE_USERNAMES = f.write(AVAILABLE_USERNAMES.split('\n'))
AttributeError: 'list' object has no attribute 'split'
Here's the code:
from multiprocessing.pool import ThreadPool
import re
import requests
import sys
try:
input = raw_input
except NameError:
pass
TWITCH_URL = "https://www.twitch.tv/{username}"
TWITCH_REGEX = re.compile(r"^[a-zA-Z0-9_]{4,25}$")
MAX_THREADS = 25
MESSAGES = {True: "Available", False: "Taken"}
AVAILABLE_USERNAMES = []
def read_valid_usernames(filename):
"""Reads a list of usernames and filters out invalid ones."""
try:
with open(filename, "r") as fin:
return [username for username in map(str.strip, fin) if TWITCH_REGEX.match(username)]
except IOError:
sys.exit("[!] '{}' - Invalid File".format(filename))
def username_available(username):
"""Checks if a 404 response code is given when requesting the profile. If it is, it is presumed to be available"""
try:
return username, requests.get(TWITCH_URL.format(username=username)).status_code == 404
AVAILABLE_USERNAMES.append(username)
except Exception as e:
print(e)
def output_available_usernames(filename):
"""Gets a filename to output to and outputs all the valid usernames to it"""
global AVAILABLE_USERNAMES
f = open(filename, 'w')
AVAILABLE_USERNAMES = f.write(AVAILABLE_USERNAMES.split('\n'))
usernames = read_valid_usernames(input("Enter path to list of usernames: "))
for username, available in ThreadPool(MAX_THREADS).imap_unordered(username_available, usernames):
print("{:<{size}}{}".format(username, MESSAGES.get(available, "Unknown"), size=len(max(usernames, key=len)) + 1))
output_available_usernames('availableusernames.txt')
Well, writing to a file can be done like this:
def output_available_usernames(filename):
global AVAILABLE_USERNAMES
with open(filename, 'w') as f:
for name in AVAILABLE_USERNAMES:
f.write(name + '\n')
As jonrsharpe said, split is going in the wrong direction.
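If you prefer a single write call, join is the inverse of split and goes in the right direction (a small sketch, assuming AVAILABLE_USERNAMES holds plain strings):
def output_available_usernames(filename):
    with open(filename, 'w') as f:
        # '\n'.join turns ['a', 'b'] into 'a\nb'; the final '\n' terminates the last line
        f.write('\n'.join(AVAILABLE_USERNAMES) + '\n')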
However, your code has a deeper problem right now. You append to AVAILABLE_USERNAMES after the return statement, so that code never executes, and AVAILABLE_USERNAMES will always be empty. You instead want something like this:
def username_available(username):
"""Checks if a 404 response code is given when requesting the profile. If it is, it is presumed to be available"""
try:
if requests.get(TWITCH_URL.format(username=username)).status_code == 404:
AVAILABLE_USERNAMES.append(username)
return username, True
else:
return username, False
except Exception as e:
print(e)
When I run this code, a NameError traceback pops up, even though the exception should be handled by the except clause. Why is that?
The function call argument is intentionally misspelled.
filename_cats = "cats.txt"
filename_dogs = "dogs.txt"
def readlines(filename):
"""read lines from a text file"""
try:
with open(filename) as f:
lines = f.readlines()
string = ''
for line in lines:
string += line
except (NameError, FileNotFoundError):
print(f"The file {filename} was not found.")
else:
print(string)
readlines(filename_cat)
It's because the error happens here:
👇
readlines(filename_cat) 👈
☝️
Not anywhere in here:
try:
with open(filename) as f:
lines = f.readlines()
string = ''
for line in lines:
string += line
except (NameError, FileNotFoundError):
A try..except block can only catch errors happening literally within it, not anything happening before or after it.
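To actually catch the NameError, the try has to wrap the line where the misspelled name is evaluated, which here is the call itself (a minimal sketch):
try:
    readlines(filename_cat)  # filename_cat is looked up here, so the NameError is raised here
except NameError:
    print("That variable name is misspelled.")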
I'm trying to make my life easier at work by writing down errors and the solutions to those errors. The program itself works fine when it comes to adding new errors, but then I added a function to verify whether an error already exists in the file, so I can later do something with it (not added yet).
The function doesn't work and I don't know why. I tried to debug it but still couldn't find the problem; maybe it's a conceptual error?
Anyway, here's my entire code.
import sys
import os
err = {}
PATH = 'C:/users/userdefault/desktop/errordb.txt'
#def open_file(): #Not yet used
#file_read = open(PATH, 'r')
#return file_read
def verify_error(error_number, loglist): #Verify if error exists in file
for error in loglist:
if error_number in loglist:
return True
def dict_error(error_number, solution): #Puts input errors in dict
err = {error_number: solution}
return err
def verify_file(): #Verify if file exists. Return True if it does
archive = os.path.isfile(PATH)
return archive
def new_error():
file = open(PATH, 'r') #Opens file in read mode
loglist = file.readlines()
file.close()
found = False
error_number = input("Error number: ")
if verify_error(error_number, loglist) == True:
found = True
# Add new solution, or another solution.
pass
solution = str(input("Solution: "))
file = open(PATH, 'a')
error = dict_error(error_number, solution)
#Writes dict on file
file.write(str(error))
file.write("\n")
file.close()
def main():
verify = verify_file() #Verify if file exists
if verify == True:
new = str.lower(input("New job Y/N: "))
if new == 'n':
sys.exit()
while new == 'y':
new_error()
new = str.lower(input("New job Y/N: "))
else:
sys.exit()
else:
file = open(PATH, "x")
file.close()
main()
main()
To clarify, the program runs fine and doesn't return an error code. It just doesn't behave the way I intend: it's supposed to verify whether a certain error number already exists.
Thanks in advance :)
I believe the issue is that you're not creating a single dictionary object in the file and modifying it; instead, you write an additional dictionary every time an error is added and then read them all back as a list of strings with the .readlines() method.
An easier way of doing it would be to keep one dictionary, creating it if it doesn't exist, and add errors to it. I've made a few modifications to your code which should help.
import sys
import os
import json # Import in json and use is as the format to store out data in
err = {}
PATH = 'C:/users/userdefault/desktop/errordb.txt'
# You can achieve this by using a context manager
#def open_file(): #Not yet used
#file_read = open(PATH, 'r')
#return file_read
def verify_error(error_number, loglist): #Verify if error exists in file
# Notice how we're looping over keys of your dictionary to check if
# an error already exists.
# To access values use loglist[k]
for k in loglist.keys():
if error_number == k:
return True
return False
def dict_error(loglist, error_number, solution): #Puts input errors in dict
# Instead of returning a new dictionary, return the existing one
# with the new error appended to it
loglist[error_number] = solution
return loglist
def verify_file(): #Verify if file exists. Return True if it does
archive = os.path.isfile(PATH)
return archive
def new_error():
# Let's move all the variables to the top, makes it easier to read the function
# Changes made:
# 1. Changed the way we open and read files, now using a context manager (aka with open() as f:
# 2. Added a json parser to store in and read from file in a json format. If data doesn't exist (new file?) create a new dictionary object instead
# 3. Added an exception to signify that an error has been found in the database (this can be removed to add additional logic if you'd like to do more stuff to the error, etc)
# 4. Changed the way we write to file, instead of appending a new line we now override the contents with a new updated dictionary that has been serialized into a json format
found = False
loglist = None
# Open file as read-only using a context manager, now we don't have to worry about closing it manually
with open(PATH, 'r') as f:
# Lets read the file and run it through a json parser to get a python dictionary
try:
loglist = json.loads(f.read())
except json.decoder.JSONDecodeError:
loglist = {}
error_number = input("Error number: ")
if verify_error(error_number, loglist) is True:
found = True
raise Exception('Error exists in the database') # Raise exception if you want to stop loop execution
# Add new solution, or another solution.
solution = str(input("Solution: "))
# This time open in write only and replace the dictionary
with open(PATH, 'w') as f:
loglist = dict_error(loglist, error_number, solution)
# Writes dict on file in json format
f.write(json.dumps(loglist))
def main():
verify = verify_file() #Verify if file exists
if verify == True:
new = str.lower(input("New job Y/N: "))
if new == 'n':
sys.exit()
while new == 'y':
new_error()
new = str.lower(input("New job Y/N: "))
else:
sys.exit()
else:
with open(PATH, "x") as f:
pass
main()
main()
Note that you will have to create a new errordb file for this snippet to work.
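To illustrate, with this approach errordb.txt ends up holding one JSON object; here is a made-up example of its contents and of parsing it back into a dictionary (the error numbers and solutions are invented):
import json

sample = '{"404": "restart the service", "500": "check the logs"}'  # hypothetical file contents
loglist = json.loads(sample)
print(loglist["404"])  # -> restart the service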
Hope this has helped somehow. If you have any further questions hit me up in the comments!
References:
Reading and Writing files in Python
JSON encoder and decoder in Python
I think there may be a couple of problems with your code, but the first thing I noticed is that you save error numbers and solutions as a dictionary in errorsdb.txt, yet when you read them back in you get a list of strings:
The line:
loglist = file.readlines()
in new_error returns a list of strings. This means that verify_error will always return False.
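A quick illustration of why the check always fails on a list of strings (the line content is made up):
loglist = ["{'404': 'restart the service'}\n"]  # what readlines() returns
print('404' in loglist)     # False: list membership needs an exact element match
print('404' in loglist[0])  # True: substring search inside a single line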
So you have a couple of choices:
You could modify verify_error to the following:
def verify_error(error_number, loglist): #Verify if error exists in file
for error in loglist:
if error_number in error:
return True
Although, I think that a better solution would be to load errorsdb.txt as a JSON file and then you'll have a dictionary. That would look something like:
import json
errordb = {}
with open(PATH) as handle:
errordb = json.load(handle)
So here are the full set of changes I would make:
import json
def verify_error(error_number, loglist): #Verify if error exists in file
for error in loglist:
if error_number in error:
return True
def new_error():
errordb = list()
existing = list()
with open(PATH) as handle:
existing = json.load(handle)
errordb += existing
error_number = input("Error number: ")
if verify_error(error_number, errordb) == True:
# Add new solution, or another solution.
print("I might do something here.")
else:
solution = str(input("Solution: "))
errordb.append({error_number: solution})
#Writes dict on file
with open(PATH, "w") as handle:
json.dump(errordb, handle)
In the book Head First Python, chapter 4, they use the syntax
print(list_name, file= output_file_name)
For them it works fine, but for me it gives a syntax error on file = output_file_name. The Python version is the same, i.e. 3.
code:
import os
man = []
other = []
try:
data = open('sketch.txt')
for each_line in data:
try:
(role, line_spoken) = each_line.split(':', 1)
line_spoken = line_spoken.strip()
if role == 'Man':
man.append(line_spoken)
elif role == 'Other Man':
other.append(line_spoken)
except ValueError:
pass
data.close()
except IOError:
print('The datafile is missing!')
try:
man_file = open('man_data.txt', 'w')
other_file = open('other_data.txt', 'w')
print(man, file=man_file)
print(other, file=other_file)
except IOError:
print('File error.')
finally:
man_file.close()
other_file.close()
As the help for the print function indicates:
file: a file-like object (stream); defaults to the current
sys.stdout.
So the argument is not supposed to be a file name but rather a file-like object. If you want to write into (say) a text file, you need to open it for writing first and pass the resulting file handle.
f = open("output.txt",'w')
print(list_name, file=f)
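Equivalently, a with block closes the file for you when the block ends (a small sketch using the same placeholder names as above):
with open("output.txt", 'w') as f:
    print(list_name, file=f)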
I've been messing around with pickle for some days, trying to apply it in a High Score system in a 'Guess the number' exercise program. I thought that I had grasped the concept correctly, but now this error has appeared and I have no clue as to why.
Here's the relevant code:
def EnterHighScore(score,scoresList):
name = input("Enter your name: ")
newPlayer = player(name,score)
scoresList.append(newPlayer)
scoresFile = open('scores','wb')
pickle.dump(scoresList,scoresFile)
scoresFile.close()
for i in scoresList:
print(i.name + ' - ' + str(i.score))
def CheckHighScores(score):
try:
scoresFile = open('scores','rb')
except:
scoresFile = open('scores','wb+')
if not scoresFile.read(1):
scoresList = []
else:
scoresList = pickle.load(scoresFile)
scoresFile.close()
if not scoresList:
EnterHighScore(score,scoresList)
else:
for counter,i in enumerate(scoresList):
if counter == 3:
break
if score >= i.score:
EnterHighScore(score,scoresList)
break
When I run it, the first run-through goes fine, that is, when the 'scores' file doesn't even exist yet. The file gets created correctly, scoresList is created empty and then filled with a player object, and it gets dumped into scoresFile without any errors. But when I try to load scoresList from the new 'scores' file data, it gives me the following error:
UnpicklingError: Invalid load key '(heart)'
(heart) standing for an actual heart character.
I've read that others have had this problem, but in those cases they were trying to open the file in different OS's, or had modified the file in some way after pickling but before unpickling. In this case the file hasn't been modified at all, just written to and closed.
I've tried using pickle in other, simpler scenarios, and I haven't caused other errors.
Any help will be appreciated.
Your test to see if the file is empty advances the file read pointer past the start of the file:
if not scoresFile.read(1):
You'll have to seek back to the beginning:
if not scoresFile.read(1):
scoresList = []
else:
scoresFile.seek(0)
scoresList = pickle.load(scoresFile)
A much better test would be for you to catch the EOFError exception that pickle.load() throws if the file is empty:
try:
scoresList = pickle.load(scoresFile)
except EOFError:
# File empty
scoresList = []
Or you could catch the IOError when the file doesn't exist:
try:
with open('scores','rb') as scoresFile:
scoresList = pickle.load(scoresFile)
except IOError:
scoresList = []
and just not open a file for writing here.
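Putting those two ideas together, the loading step could live in a small helper that treats a missing file and an empty file the same way (a sketch; the name load_scores is mine, not from the question):
import pickle

def load_scores(path='scores'):
    try:
        with open(path, 'rb') as scoresFile:
            return pickle.load(scoresFile)  # existing, non-empty file
    except (IOError, EOFError):  # file missing, or present but empty
        return []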
I have the following working code:
import urllib.request
import zipfile
url = "http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2sop"
filename = "C:/test/archive.zip"
destinationPath = "C:/test"
urllib.request.urlretrieve(url,filename)
sourceZip = zipfile.ZipFile(filename, 'r')
for name in sourceZip.namelist():
sourceZip.extract(name, destinationPath)
sourceZip.close()
It works perfectly a few times, but because the server I'm retrieving the file from has some limits, I get this error once I reach the daily limit:
Traceback (most recent call last):
File "script.py", line 11, in <module>
urllib.request.urlretrieve(url,filename)
File "C:\Python32\lib\urllib\request.py", line 150, in urlretrieve
return _urlopener.retrieve(url, filename, reporthook, data)
File "C:\Python32\lib\urllib\request.py", line 1591, in retrieve
block = fp.read(bs)
ValueError: read of closed file
How do I alter the script so that it takes a list of URLs instead of a single one, keeps trying to download from the list until one succeeds, and then continues with the unzip? I just need one successful download.
Apologies for being very new to Python, but I can't figure this one out. I'm assuming I have to change the variable to look something like this:
url = {
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2soe",
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2sod",
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2soc",
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2sob",
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2soa",
}
and then changing this line into some sort of loop:
urllib.request.urlretrieve(url,filename)
You want to put your URLs in a list, then loop through that list and try each one. Catch and ignore any exception a download throws, and break out of the loop once one succeeds. Try this:
import urllib.request
import zipfile
urls = ["http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2sop", "other url", "another url"]
filename = "C:/test/test.zip"
destinationPath = "C:/test"
for url in urls:
try:
urllib.request.urlretrieve(url,filename)
sourceZip = zipfile.ZipFile(filename, 'r')
break
except ValueError:
pass
for name in sourceZip.namelist():
sourceZip.extract(name, destinationPath)
sourceZip.close()
import urllib.request
import zipfile
urllist = ("http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2sop",
"another",
"yet another",
"etc")
filename = "C:/test/test.zip"
destinationPath = "C:/test"
for url in urllist:
try:
urllib.request.urlretrieve(url,filename)
except ValueError:
continue
sourceZip = zipfile.ZipFile(filename, 'r')
for name in sourceZip.namelist():
sourceZip.extract(name, destinationPath)
sourceZip.close()
break
This will work assuming you just want to try them each once until one works, then stop.
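If silently doing nothing when every URL fails is not acceptable, the loop's else clause (which runs only when no break happened) can flag that; a sketch built on the same names as above, with the unzip step moved after the loop:
import sys

for url in urllist:
    try:
        urllib.request.urlretrieve(url, filename)
    except ValueError:
        continue
    break  # success: stop trying further URLs
else:
    sys.exit("None of the URLs could be downloaded")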
For full-fledged distributed tasks you can check out Celery and its retry mechanism, Celery-retry,
or you can have a look at Retry-decorator.
Example:
import math  # needed for math.floor(tries) below
import time
# Retry decorator with exponential backoff
def retry(tries, delay=3, backoff=2):
"""Retries a function or method until it returns True.
delay sets the initial delay, and backoff sets how much the delay should
lengthen after each failure. backoff must be greater than 1, or else it
isn't really a backoff. tries must be at least 0, and delay greater than
0."""
if backoff <= 1:
raise ValueError("backoff must be greater than 1")
tries = math.floor(tries)
if tries < 0:
raise ValueError("tries must be 0 or greater")
if delay <= 0:
raise ValueError("delay must be greater than 0")
def deco_retry(f):
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay # make mutable
rv = f(*args, **kwargs) # first attempt
while mtries > 0:
if rv == True: # Done on success
return True
mtries -= 1 # consume an attempt
time.sleep(mdelay) # wait...
mdelay *= backoff # make future wait longer
rv = f(*args, **kwargs) # Try again
return False # Ran out of tries :-(
return f_retry # true decorator -> decorated function
return deco_retry # @retry(arg[, ...]) -> true decorator
urls = [
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2soe",
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2sod",
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2soc",
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2sob",
"http://url.com/archive.zip?key=7UCxcuCzFpYeu7tz18JgGZFAAgXQ2soa",
]
for u in urls:
urllib.request.urlretrieve(u,filename)
... rest of code ...
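A minimal sketch of wiring the decorator above to the download step; the helper name try_download and the tries/delay values are assumptions, not part of the original answer, and urls and filename are the variables already shown:
@retry(tries=3, delay=2)
def try_download(url, filename):
    try:
        urllib.request.urlretrieve(url, filename)
        return True   # True stops the retry loop
    except ValueError:
        return False  # False triggers another attempt after the delay

for u in urls:
    if try_download(u, filename):
        break  # stop at the first URL that downloads successfully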