Python Traceback Issue - python

When I run the script it says "Traceback (most recent call last)" and points at line 22 of my script and line 410 in the library, and so on, with problems inside Python. The error is raised on home = opener.open():

Traceback (most recent call last):
  File "", line 1, in
  File "C:\u\u\d\WinPython-32bit-2.7.6.2\python-2.7.6\lib\site-packages\spyderlib\widgets\externalshell\sitecustomize.py", line 540, in runfile
    execfile(filename, namespace)
  File "C:/u/u/d/WinPython-32bit-2.7.6.2/python-2.7.6/Scripts/ox.py", line 20, in
    home = opener.open('', data)
  File "C:\u\u\d\WinPython-32bit-2.7.6.2\python-2.7.6\lib\urllib2.py", line 410, in open
    response = meth(req, response)
  File "C:\u\u\d\WinPython-32bit-2.7.6.2\python-2.7.6\lib\urllib2.py", line 523, in http_response
    'http', request, response, code, msg, hdrs)
  File "C:\u\u\d\WinPython-32bit-2.7.6.2\python-2.7.6\lib\urllib2.py", line 448, in error
    return self._call_chain(*args)
  File "C:\u\u\d\WinPython-32bit-2.7.6.2\python-2.7.6\lib\urllib2.py", line 382, in _call_chain
    result = func(*args)
  File "C:\u\u\d\WinPython-32bit-2.7.6.2\python-2.7.6\lib\urllib2.py", line 531, in http_error_default
    raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
urllib2.HTTPError: HTTP Error 500: Internal Server Error
import urllib
import urllib2
import cookielib
import re
import os
from random import choice

cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
opener.addheaders = [
    ("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:26.0) Gecko/20100101 Firefox/26.0"),
    ("Cookie", ".SECURITY=232")
]
f = 58454700
print "",
url = 'http://www.myhost.com/increment'
x = 0
while x < 1000000000:
    f = f + 1
    z = str(f)
    url = 'http://www.myhost.com/Send/SentIncrement'
    data = urllib.urlencode({"Increment": z, "VerificationToken": "Verified"})
    home = opener.open('http://www.myhost.com/Send/SentIncrement', data)
    os.system("cls")
    print f
    print data
    x = x + 1
raw_input("")
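Since the failure is an HTTP 500, the server itself is rejecting the request, so a first debugging step is to catch the HTTPError and read the body the server sends back, which often describes what it disliked. A minimal sketch, assuming the same opener and data as above:

import urllib2

try:
    home = opener.open('http://www.myhost.com/Send/SentIncrement', data)
except urllib2.HTTPError as e:
    # HTTPError also behaves like a response object, so the 500 page can be read
    print "server returned", e.code, e.msg
    print e.read()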

Related

IOError: ('http error', 403, 'Forbidden', <httplib.HTTPMessage instance at 0x7f98ec3d92d8>)

I want to download a file like this:
import urllib

link = 'http://ir.30nama.download/movies/t/The_Huntsman_Winters_War_2016_EXTENDED_Dubbed_1080p_BrRip_30nama_30NAMA.mkv?md5=E38HpAmjkzwU7Fpag-YvtA&expires=1529934194&refresh=4918368251152863819423374231251501'
filename = link[link.rfind('/') + 1:].split('?')[0]
response = urllib.URLopener()
response.addheader('User-Agent',
                   'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11')
response.addheader('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8')
response.addheader('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.3')
response.addheader('Accept-Encoding', 'none')
response.addheader('Accept-Language', 'en-US,en;q=0.8')
response.addheader('Connection', 'keep-alive')
response.retrieve(link, 'test.mkv')
I've added headers exactly as in this post, but the result is:
Traceback (most recent call last):
  File "downloader.py", line 29, in <module>
    response.retrieve(item['src_link'], consts.pdp_root + filename)
  File "/usr/lib/python2.7/urllib.py", line 245, in retrieve
    fp = self.open(url, data)
  File "/usr/lib/python2.7/urllib.py", line 213, in open
    return getattr(self, name)(url)
  File "/usr/lib/python2.7/urllib.py", line 364, in open_http
    return self.http_error(url, fp, errcode, errmsg, headers)
  File "/usr/lib/python2.7/urllib.py", line 381, in http_error
    return self.http_error_default(url, fp, errcode, errmsg, headers)
  File "/usr/lib/python2.7/urllib.py", line 386, in http_error_default
    raise IOError, ('http error', errcode, errmsg, headers)
IOError: ('http error', 403, 'Forbidden', <httplib.HTTPMessage instance at 0x7f98ec3d92d8>)
What should I do?
According to this answer, the 30nama website blocks non-browser user agents.
You can try a streaming request with the requests library instead.
import requests
import shutil

link = 'http://ir.30nama.download/movies/t/The_Huntsman_Winters_War_2016_EXTENDED_Dubbed_1080p_BrRip_30nama_30NAMA.mkv?md5=E38HpAmjkzwU7Fpag-YvtA&expires=1529934194&refresh=4918368251152863819423374231251501'
filename = link[link.rfind('/') + 1:].split('?')[0]
r = requests.get(link, stream=True)
if r.status_code == 200:
    with open('test.mkv', 'wb') as f:
        r.raw.decode_content = True
        shutil.copyfileobj(r.raw, f)
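If the 403 persists, it may also help to send a browser-like User-Agent with the streaming request, since the site is said to block non-browser agents. A hedged variant of the call above; the header value is just an example:

headers = {
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 '
                  '(KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'
}
r = requests.get(link, stream=True, headers=headers)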

Python Twitter API trying to retrieve tweet but error: AttributeError: 'int' object has no attribute 'encode'

Why am I getting AttributeError: 'int' object has no attribute 'encode'?
I am trying to retrieve a tweet using the Twitter API in Python. The full traceback is here:
Traceback (most recent call last):
  File "C:/Python27/lol.py", line 34, in <module>
    headers = req.to_header()
  File "build\bdist.win-amd64\egg\oauth2\__init__.py", line 398, in to_header
    params_header = ', '.join(header_params)
  File "build\bdist.win-amd64\egg\oauth2\__init__.py", line 397, in <genexpr>
    header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
  File "build\bdist.win-amd64\egg\oauth2\__init__.py", line 396, in <genexpr>
    stringy_params = ((k, escape(v)) for k, v in oauth_params)
  File "build\bdist.win-amd64\egg\oauth2\__init__.py", line 163, in escape
    s = s.encode('utf-8')
AttributeError: 'int' object has no attribute 'encode'
Below is the code I'm using.
import oauth2
import time
import urllib2
import json

url1 = "https://api.twitter.com/1.1/search/tweets.json"
params = {
    "oauth_version": "1.9.0",
    "oauth_nonce": oauth2.generate_nonce(),
    "oauth_timestamp": int(time.time())
}
consumer = oauth2.Consumer(key="*********", secret="*********")
token = oauth2.Token(key="*********", secret="*********")
params["oauth_consumer_key"] = consumer.key
params["oauth_token"] = token.key
for i in range(1):
    url = url1
    req = oauth2.Request(method="GET", url=url, parameters=params)
    signature_method = oauth2.SignatureMethod_HMAC_SHA1()
    req.sign_request(signature_method, consumer, token)
    headers = req.to_url()
    print headers
    print url
for i in range(1):
    url = url1
    params["q"] = "pictorial"
    params["count"] = 2
    req = oauth2.Request(method="GET", url=url, parameters=params)
    signature_method = oauth2.SignatureMethod_HMAC_SHA1()
    req.sign_request(signature_method, consumer, token)
    headers = req.to_header()
    url = req.to_url()
    response = urllib2.Request(url)
    data = json.load(urllib2.urlopen(response))
    if data["statuses"] == []:
        print "end of data"
        break
    else:
        print data
And if I change int(time.time()) to str(time.time()),
I get the following error:
Traceback (most recent call last):
  File "C:/Python27/lol.py", line 37, in <module>
    data = json.load(urllib2.urlopen(response))
  File "C:\Python27\lib\urllib2.py", line 154, in urlopen
    return opener.open(url, data, timeout)
  File "C:\Python27\lib\urllib2.py", line 437, in open
    response = meth(req, response)
  File "C:\Python27\lib\urllib2.py", line 550, in http_response
    'http', request, response, code, msg, hdrs)
  File "C:\Python27\lib\urllib2.py", line 475, in error
    return self._call_chain(*args)
  File "C:\Python27\lib\urllib2.py", line 409, in _call_chain
    result = func(*args)
  File "C:\Python27\lib\urllib2.py", line 558, in http_error_default
    raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
HTTPError: HTTP Error 400: Bad Request
"oauth_timestamp": int(time.time())
here you use an int, but that field must be a string.
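A minimal sketch of the corrected parameters, keeping the integer timestamp but passing it as a string. Note that "1.0" for oauth_version is an assumption here; the "1.9.0" in the question is not a standard OAuth version string and may also contribute to the 400:

params = {
    "oauth_version": "1.0",                     # assumption: standard OAuth 1.0 version string
    "oauth_nonce": oauth2.generate_nonce(),
    "oauth_timestamp": str(int(time.time()))    # integer seconds, sent as a string
}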

urllib2.HTTPError - Getting data from a website in Python

I have a script, and when I run it I get an error message:
urllib2.HTTPError: HTTP Error 400: Bad Request
Can you help me?
from lxml import html
import urllib2
import urllib

ip_list = []
port_list = []
protocol_list = []
array = [20, 40]
ck = True
i = 0
while i < len(array):
    h = urllib2.urlopen('http://proxylist.me/proxys/index/' + str(array[i]))
    HTML_CODE = h.read()
    tree = html.fromstring(HTML_CODE)
    for block in tree.xpath('//tbody/tr'):
        ip, port, _, protocol, _, _, _, _, _ = [
            x.strip()
            for x in block.xpath('.//text()')
            if x.strip() not in ""
        ]
        ip_l = "{}".format(ip)
        port_l = "{}".format(port)
        protocol_l = "{}".format(protocol)
        if ip_l != {}:
            ck = True
            ip_list.append(ip_l)
            port_list.append(port_l)
            protocol_list.append(protocol_l)
            i = i + 1
        else:
            ck = False
print ip_list
I am getting this error:
Traceback (most recent call last):
  File "C:/Users/PC0308-PC/Desktop/get_data_html.py", line 11, in <module>
    h = urllib2.urlopen('http://proxylist.me/proxys/index/'+str(i))
  File "C:\Python27\lib\urllib2.py", line 154, in urlopen
    return opener.open(url, data, timeout)
  File "C:\Python27\lib\urllib2.py", line 437, in open
    response = meth(req, response)
  File "C:\Python27\lib\urllib2.py", line 550, in http_response
    'http', request, response, code, msg, hdrs)
  File "C:\Python27\lib\urllib2.py", line 475, in error
    return self._call_chain(*args)
  File "C:\Python27\lib\urllib2.py", line 409, in _call_chain
    result = func(*args)
  File "C:\Python27\lib\urllib2.py", line 558, in http_error_default
    raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
urllib2.HTTPError: HTTP Error 400: Bad Request
array = [0, 20, 40]
ck = True
for item in array:
    h = urllib2.urlopen('http://proxylist.me/proxys/index/%s' % (item))
    HTML_CODE = h.read()
    tree = html.fromstring(HTML_CODE)
    for block in tree.xpath('//tbody/tr'):
        ip, port, _, protocol, _, _, _, _, _ = [
            x.strip()
            for x in block.xpath('.//text()')
            if x.strip() not in ""
        ]
        ip_l = "{}".format(ip)
        port_l = "{}".format(port)
        protocol_l = "{}".format(protocol)
        if ip_l != {}:
            ck = True
            ip_list.append(ip_l)
            port_list.append(port_l)
            protocol_list.append(protocol_l)
        else:
            ck = False
print ip_list
This works on my Windows machine and parses the first three pages from http://proxylist.me/proxys/index.
By the way, your code worked from the beginning, but it only parsed the first page.
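If you need more pages, the offsets can be generated instead of hard-coded; a small variant, assuming the site keeps paging in steps of 20 as the [0, 20, 40] list suggests:

for item in range(0, 60, 20):  # yields 0, 20, 40; raise the stop value for more pages
    h = urllib2.urlopen('http://proxylist.me/proxys/index/%s' % item)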

Problems with uploading content to a website

I am a newly self-taught programmer. I was hoping to upload some numbers to my website with Python, but I failed. Could you help me figure out what is wrong?
Here is my original Python code.
#!/usr/bin/python
import time
import urllib.request
import random
import datetime
from urllib.request import Request, urlopen

basic_web = 'http://ihome.ust.hk/~xxxxx/cgi-bin/datafile.php?'
message = ""
while True:
    local_time = time.time()
    web_x = basic_web
    file1 = open("datalist1.txt", "r")
    queue1 = file1.read()
    file1.close()
    web_x += "&queue1=" + queue1
    file2 = open("datalist2.txt", "r")
    queue2 = file2.read()
    file2.close()
    web_x += "&queue2=" + queue2
    web_x += "&local_time=" + str(local_time)
    print(web_x)
    #req = Request(web_x)
    #html = urlopen(req).read()
    response = urllib.request.urlopen(web_x, timeout=1)
    html = response.read()
    print(html)
    time.sleep(0.1)
print("hehe")
And here is the output error that I got:
Traceback (most recent call last):
  File "C:\web bus stop\local\datauploader.py", line 25, in <module>
    response = urllib.request.urlopen(web_x)
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\urllib\request.py", line 162, in urlopen
    return opener.open(url, data, timeout)
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\urllib\request.py", line 465, in open
    response = self._open(req, data)
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\urllib\request.py", line 483, in _open
    '_open', req)
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\urllib\request.py", line 443, in _call_chain
    result = func(*args)
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\urllib\request.py", line 1268, in http_open
    return self.do_open(http.client.HTTPConnection, req)
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\urllib\request.py", line 1243, in do_open
    r = h.getresponse()
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\http\client.py", line 1174, in getresponse
    response.begin()
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\http\client.py", line 282, in begin
    version, status, reason = self._read_status()
  File "C:\Users\ad\AppData\Local\Programs\Python\Python35-32\lib\http\client.py", line 264, in _read_status
    raise BadStatusLine(line)
http.client.BadStatusLine: connected! queue1queue2 finish sir!
I would really appreciate it if you could help me figure out what the bug is.
Never mind.
I ran it on a different computer and it works now.
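For reference, BadStatusLine means the first line of the server's reply was not a valid HTTP status line: here the text "connected! queue1queue2 finish sir!" arrived where something like "HTTP/1.1 200 OK" was expected, so the CGI script was replying without proper HTTP headers. Unencoded file contents in the query string are one plausible trigger on the client side; a minimal sketch (Python 3, reusing names from the question) that encodes the parameters properly:

import urllib.parse
import urllib.request

# assumption: queue1, queue2 and local_time are set as in the loop above
query = urllib.parse.urlencode({
    "queue1": queue1,
    "queue2": queue2,
    "local_time": str(local_time),
})
# basic_web already ends with '?', so the encoded query can be appended directly
response = urllib.request.urlopen(basic_web + query, timeout=1)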

Proxy handler in Python

When I disable the proxy server in the web browser settings and comment out the proxy handler code, the code below works fine.
import urllib2
import urllib2_file
import urllib
import random
import mimetypes
import string
from os import listdir
import time
from google.refine import refine
from google.refine import facet

proxy = urllib2.ProxyHandler({'http': '10.200.1.26'})
opener = urllib2.build_opener(proxy)
urllib2.install_opener(opener)

def encode_multipart(fields, files, boundary=None):
    def escape_quote(s):
        return s.replace('"', '\\"')
    if boundary is None:
        boundary = ''.join(random.choice(_BOUNDARY_CHARS) for i in range(30))
    lines = []
    for name, value in fields.items():
        lines.extend((
            '--{0}'.format(boundary),
            'Content-Disposition: form-data; name="upload"',  #.format(escape_quote(name)),
            '',
            str(value),
        ))
    for name, value in files.items():
        filename = value['filename']
        if 'mimetype' in value:
            mimetype = value['mimetype']
        else:
            mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
        lines.extend((
            '--{0}'.format(boundary),
            'Content-Disposition: form-data; name="upload"; filename="{0}"'.format(escape_quote(filename)),
            'Content-Type: {0}'.format(mimetype),
            '',
            value['content'],
        ))
    lines.extend((
        '--{0}--'.format(boundary),
        '',
    ))
    body = '\r\n'.join(lines)
    headers = {
        'Content-Type': 'multipart/form-data; boundary={0}'.format(boundary),
        'Content-Length': str(len(body)),
    }
    return (body, headers)

_BOUNDARY_CHARS = string.digits + string.ascii_letters

u = urllib2.urlopen("http://127.0.0.1:3333/command/core/create-importing-job", data=urllib.urlencode({"test": ""}))
a = u.read()
id = ""
for i in a:
    if i.isdigit():
        id += str(i)
# sample output '{ "jobID" : 1393566803991 }'
files = {}
pathtoXML = r"C:\75"
#pathtoXML = r"C:\AM\trial"
for i in listdir(pathtoXML):
    files[i] = {'filename': i, 'content': open(pathtoXML + "\\" + i).read()}
#load raw data using the job id found in
url = "http://127.0.0.1:3333/command/core/importing-controller?controller=core%2Fdefault-importing-controller&jobI=" + id + "&subCommand=load-raw-data"
data, headers = encode_multipart({}, files)
#print len(data)
#print headers
req = urllib2.Request(url, data=data, headers=headers)
f = urllib2.urlopen(req)
f.read()
# get job status
u = urllib2.urlopen("http://127.0.0.1:3333/command/core/get-importing-job-status?jobID=" + id, "test")
u.read()
#from fileSelection update file selection
u = urllib2.urlopen("http://127.0.0.1:3333/command/core/importing-controller?controller=core%2Fdefault-importing-controller&subCommand=update-file-selection&jobID=" + id, "fileSelection=%5B0%2C1%2C2%2C3%5D")
u.read()
#init parser format text
u = urllib2.urlopen("http://127.0.0.1:3333/command/core/importing-controller?controller=core%2Fdefault-importing-controller&jobID=" + id + "&subCommand=initialize-parser-ui&format=text%2Fxml")
u.read()
#update format and options
updateformatoptionurl = "http://127.0.0.1:3333/command/core/importing-controller?controller=core%2Fdefault-importing-controller&jobID=" + id + "&subCommand=update-format-and-options"
d = urllib.urlencode({"format": "text/xml", "options": {"recordPath": ["ArrayOfAfiles", "Afiles"], "limit": -1, "includeFileSources": "false", "guessCellValueTypes": "false"}})
u = urllib2.urlopen(updateformatoptionurl, d)
u.read()
'{"status":"ok"}'
#get-models
u = urllib2.urlopen("http://127.0.0.1:3333/command/core/get-models?importingJobID=" + id)
u.read()
# create project from import job
createfromimporturl = "http://127.0.0.1:3333/command/core/importing-controller?controller=core%2Fdefault-importing-controller&jobID=" + id + "&subCommand=create-project"
d = urllib.urlencode({"format": "text/xml", "options": {"recordPath": ["ArrayOfAfiles", "Afiles"], "limit": -1, "includeFileSources": "false", "projectName": time.ctime()}})
u = urllib2.urlopen(createfromimporturl, d)
r = u.read()
After adding the proxy handler code it stopped working; when I run it, it complains:
Traceback (most recent call last):
  File "C:\hari\trial.py", line 87, in <module>
    u = urllib2.urlopen("http://127.0.0.1:3333/command/core/create-importing-job", data=urllib.urlencode({"test": ""}))
  File "C:\Python27\lib\urllib2.py", line 126, in urlopen
    return _opener.open(url, data, timeout)
  File "C:\Python27\lib\urllib2.py", line 391, in open
    response = self._open(req, data)
  File "C:\Python27\lib\urllib2.py", line 409, in _open
    '_open', req)
  File "C:\Python27\lib\urllib2.py", line 369, in _call_chain
    result = func(*args)
  File "C:\Python27\urllib2_file.py", line 207, in http_open
    return self.do_open(httplib.HTTP, req)
  File "C:\Python27\urllib2_file.py", line 298, in do_open
    return self.parent.error('http', req, fp, code, msg, hdrs)
  File "C:\Python27\lib\urllib2.py", line 435, in error
    return self._call_chain(*args)
  File "C:\Python27\lib\urllib2.py", line 369, in _call_chain
    result = func(*args)
  File "C:\Python27\lib\urllib2.py", line 518, in http_error_default
    raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
HTTPError: HTTP Error 404: Not Found
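One likely cause is that, once the proxy opener is installed globally with urllib2.install_opener, even the requests to 127.0.0.1:3333 are routed through 10.200.1.26, and the proxy answers 404 for the local Refine URLs. A minimal sketch that keeps a separate, proxy-free opener for localhost instead of installing the proxy globally (hedged; adapt to your setup):

import urllib
import urllib2

# opener that goes through the proxy, for external hosts only
proxy_opener = urllib2.build_opener(urllib2.ProxyHandler({'http': '10.200.1.26'}))

# opener with an empty proxy map, so 127.0.0.1 is reached directly
local_opener = urllib2.build_opener(urllib2.ProxyHandler({}))

# use local_opener for all the 127.0.0.1:3333 calls
u = local_opener.open("http://127.0.0.1:3333/command/core/create-importing-job",
                      data=urllib.urlencode({"test": ""}))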
