Google Distance Matrix with Python

My problem is that I want to get distances for around 4000 lat/long pairs. I have used the Google Distance Matrix service for this, and to a certain extent it worked fine, but then I started getting an "invalid request" error because the request is sent with GET. I want to know how I can call the Distance Matrix service with POST from Python.
I am writing this with Python on App Engine and got stuck with the urllib POST method. Appreciate your help on this, thanks in advance.
Code:
import urllib
import urllib2

url = 'http://maps.googleapis.com/maps/api/distancematrix/json'
conn = getConnection()
cursor = conn.cursor()
origins = []
try:
    cursor.execute('select username,lat,lng,cabNo,orderno from tripsheet order by username;')
    origins = cursor.fetchall()
except:
    self.response.out.write("Something bad happened")
conn.close()

responseArray = []
for o in origins:
    key = "blah"
    origin = {"name": o[0], "key": key, "latitude": o[1], "longitude": o[2], "cabNo": o[3], "order": o[4]}
    responseArray.append(origin)
    url = url + o[1] + ',' + o[2] + '|'

values = {
    'sensor': 'false',
    'mode': 'driving',
    'avoid': 'tolls',
    'destinations': '%s,%s' % (destination["lat"], destination["lon"])
}
data = urllib.urlencode(values)
req = urllib2.Request(url, data)
response = urllib2.urlopen(req)
the_page = response.read()
self.response.out.write(the_page)
I am trying to pass around 4000 origins and a single destination.
I get the error message below because the request goes out as GET; I want to convert it to POST using urllib2:
Traceback (most recent call last):
File "/home/xxx/Projects/google_appengine/google/appengine/ext/webapp/_webapp25.py", line 714, in __call__
handler.get(*groups)
File "/home/xxx/4.2WorkSpace/RouteOptimization/src/main.py", line 41, in get
self.calculate_indv_distance(destination)
File "/home/xxx/4.2WorkSpace/RouteOptimization/src/main.py", line 109, in calculate_indv_distance
response = urllib2.urlopen(req)
File "/usr/lib/python2.7/urllib2.py", line 126, in urlopen
return _opener.open(url, data, timeout)
File "/usr/lib/python2.7/urllib2.py", line 400, in open
response = self._open(req, data)
File "/usr/lib/python2.7/urllib2.py", line 418, in _open
'_open', req)
File "/usr/lib/python2.7/urllib2.py", line 378, in _call_chain
result = func(*args)
File "/usr/lib/python2.7/urllib2.py", line 1207, in http_open
return self.do_open(httplib.HTTPConnection, req)
File "/usr/lib/python2.7/urllib2.py", line 1182, in do_open
r = h.getresponse()
File "/home/xxx/Projects/google_appengine/google/appengine/dist/httplib.py", line 222, in getresponse
deadline=self.timeout)
File "/home/xxx/Projects/google_appengine/google/appengine/api/urlfetch.py", line 266, in fetch
return rpc.get_result()
File "/home/xxx/Projects/google_appengine/google/appengine/api/apiproxy_stub_map.py", line 604, in get_result
return self.__get_result_hook(self)
File "/home/xxx/Projects/google_appengine/google/appengine/api/urlfetch.py", line 370, in _get_fetch_result
'Invalid request URL: ' + url + error_detail)
InvalidURLError: Invalid request URL: http://maps.googleapis.com/maps/api/distancematrix/
Any help is really appreciated.
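For what it's worth, the Distance Matrix web service is documented as a GET endpoint, so rather than switching to POST the usual workaround is to split the origins into small batches and issue one GET per batch, which keeps every URL well under the length limit that triggers InvalidURLError. A minimal sketch of that idea (the 25-origin batch size is an assumption, so check the per-request limits of your plan; origins here would be the (lat, lng) pairs pulled from the tripsheet query above):

import json
import urllib
import urllib2

BASE = 'http://maps.googleapis.com/maps/api/distancematrix/json'
BATCH = 25  # assumed per-request origin limit; adjust to your quota

def distances(origins, destination):
    # origins: list of (lat, lng) pairs; destination: a single (lat, lng) pair
    results = []
    for i in range(0, len(origins), BATCH):
        chunk = origins[i:i + BATCH]
        params = urllib.urlencode({
            'origins': '|'.join('%s,%s' % (lat, lng) for lat, lng in chunk),
            'destinations': '%s,%s' % destination,
            'mode': 'driving',
            'avoid': 'tolls',
            'sensor': 'false',
        })
        # the query string stays in the URL, so each request is still a GET,
        # but it is short enough for urlfetch to accept
        resp = urllib2.urlopen(BASE + '?' + params)
        results.append(json.loads(resp.read()))
    return results

Each call returns one JSON document per batch; the rows can then be stitched back together in the same order as the origins list.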

Related

Python | Request POST method in Python 2.7

I am trying to connect to an API using Python 2.7.
Code:
from urllib import urlencode
import urllib2

def http_post(url, data):
    post = urlencode(data)
    req = urllib2.Request(url, post)
    response = urllib2.urlopen(req)
    return response.read()
Error:
>>> r = http_post(LOGIN_URL, PARAMS)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 4, in http_post
File "/usr/local/lib/python2.7/urllib2.py", line 127, in urlopen
return _opener.open(url, data, timeout)
File "/usr/local/lib/python2.7/urllib2.py", line 404, in open
response = self._open(req, data)
File "/usr/local/lib/python2.7/urllib2.py", line 422, in _open
'_open', req)
File "/usr/local/lib/python2.7/urllib2.py", line 382, in _call_chain
result = func(*args)
File "/usr/local/lib/python2.7/urllib2.py", line 1222, in https_open
return self.do_open(httplib.HTTPSConnection, req)
File "/usr/local/lib/python2.7/urllib2.py", line 1184, in do_open
raise URLError(err)
urllib2.URLError: <urlopen error [Errno -5] No address associated with hostname>
Similar code runs fine in Python 3.5.
It looks like the URL is not being found.
Have you defined LOGIN_URL just above the output we see in your "Error:" extract?
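Errno -5 is a name-resolution failure rather than a problem with the POST body, so it may be worth confirming that the hostname in LOGIN_URL actually resolves from that machine before looking any further. A small check along those lines (Python 2; LOGIN_URL is whatever you already have defined):

import socket
import urlparse

def can_resolve(url):
    # Errno -5 / gaierror means the hostname in the URL does not resolve here
    host = urlparse.urlparse(url).hostname
    try:
        socket.getaddrinfo(host, None)
        return True
    except socket.gaierror:
        return False

If can_resolve(LOGIN_URL) returns False, the problem is DNS (a typo in the hostname, a missing hosts entry, or the resolver on that machine), not urllib2.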

raspberry pi can't make any url requests, connection timed out

I need to push my sensor data to ThingSpeak, so I was doing it with the Python thingspeak library. Two days ago it was working fine, but now the connection times out. I also tried to update with urllib2, and that doesn't work either.
My network connection is fine: I can open web pages on the Pi, and I can update the channel from my laptop using both the thingspeak library and urllib2.
Could someone kindly help me?
My code with the thingspeak library:
node = False
channel_id = ""
write = ""
if data['MAC'] in ':ab:35':
    channel_id = "XXXXX"
    write = "XXXXXXXXXXX"
    node = True
if node:
    channel = thingspeak.Channel(id=channel_id, write_key=write)
    try:
        response = channel.update({1: data['TEMP'], 2: data['VOLT'], 3: data['PRES'], 4: data['HUM']})
        print response
        print 'Thingspeak updated!!!'
    except:
        print "connection failed"
When I try urllib2:
f = urllib2.urlopen(url)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/lib/python2.7/urllib2.py", line 154, in urlopen
return opener.open(url, data, timeout)
File "/usr/lib/python2.7/urllib2.py", line 431, in open
response = self._open(req, data)
File "/usr/lib/python2.7/urllib2.py", line 449, in _open
'_open', req)
File "/usr/lib/python2.7/urllib2.py", line 409, in _call_chain
result = func(*args)
File "/usr/lib/python2.7/urllib2.py", line 1240, in https_open
context=self._context)
File "/usr/lib/python2.7/urllib2.py", line 1197, in do_open
raise URLError(err)
urllib2.URLError: <urlopen error [Errno 110] Connection timed out>
When I try the thingspeak library:
>>> channel = thingspeak.Channel(id=c,write_key=w)
>>> res = channel.update({1:50,2:30,3:70,4:20})
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "build/bdist.linux-armv7l/egg/thingspeak/thingspeak.py", line 116, in update
File "/usr/lib/python2.7/dist-packages/requests/api.py", line 94, in post
return request('post', url, data=data, json=json, **kwargs)
File "/usr/lib/python2.7/dist-packages/requests/api.py", line 49, in request
return session.request(method=method, url=url, **kwargs)
File "/usr/lib/python2.7/dist-packages/requests/sessions.py", line 457, in request
resp = self.send(prep, **send_kwargs)
File "/usr/lib/python2.7/dist-packages/requests/sessions.py", line 569, in send
r = adapter.send(request, **kwargs)
File "/usr/lib/python2.7/dist-packages/requests/adapters.py", line 407, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: ('Connection aborted.', error(110, 'Connection timed out'))
I am using a Raspberry Pi to send data to ThingSpeak.
What I do is something like this:
import httplib
import urllib

# Preparing to send the information to the cloud
params = urllib.urlencode({'field1': liters, 'key': key})
headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
conn = httplib.HTTPConnection("api.thingspeak.com:80")

# Sending the data to the thingspeak platform
try:
    conn.request("POST", "/update", params, headers)
    response = conn.getresponse()
    print response.status, response.reason
    data = response.read()
    conn.close()
except:
    print "connection failed"
And it works just fine.
key is the string of your channel's write key.
Hope it helps.
If I use SSH and run the script, it works.
Add a User-Agent header by creating a Request object and passing it to urlopen:
import urllib2
request = urllib2.Request('http://www.example.com/')
request.add_header('User-agent', 'Mozilla/5.0 (Linux i686)')
response = urllib2.urlopen(request)
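Since the failures here are timeouts rather than refusals, it can also help to set an explicit timeout and retry a few times before giving up, so a transient network hiccup on the Pi does not look like a dead service. A quick sketch of that (the retry count and backoff values are arbitrary):

import time
import urllib2

def fetch_with_retries(request, tries=3, timeout=10):
    # try the request a few times, backing off between attempts
    for attempt in range(tries):
        try:
            return urllib2.urlopen(request, timeout=timeout).read()
        except urllib2.URLError as e:
            print "attempt %d failed: %s" % (attempt + 1, e)
            time.sleep(2 ** attempt)
    return None

If every attempt still times out from the Pi while the same request works from the laptop, the problem is almost certainly the Pi's network path (routing, firewall, or DNS) rather than the code.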

Would a dns name not existing for an IP cause this python error message?

I am attempting to run the following script using the suds library:
from suds.client import Client
from suds.transport.http import HttpAuthenticated
import os
t = HttpAuthenticated(username='xxxxxxx', password='xxxxxxxx')
url = 'http://10.14.9.42/NodeBeanService/NodeBean?wsdl'
client = Client(url, transport=t)
filterlimit = client.factory.create('ns3:constraint')
filterlimit.name = "maxObjects"
filterlimit.value = "30000"
filter1 = client.factory.create('ns3:condition')
filter1.name = "status"
filter1.operator = "EQ"
filter1.value = "NORMAL"
filter2 = client.factory.create('ns3:condition')
filter2.name = "notes"
filter2.operator = "NE"
filter2.value = "none"
filter = client.factory.create('ns3:expression')
filter.operator = "AND"
filter.subFilters = [filter1, filter2, filterlimit]
allNodes = client.service.getNodes(filter)
print "Nodes in topology:", len(allNodes.item)
for i in allNodes.item[:]:
    print i.name, i.notes, i.id
This script works perfectly fine with one server, but when I try another server that does not have a DNS name assigned to it, I keep getting the following error:
C:\Users\pzsr7z.000\Documents>python testnnm.py
Traceback (most recent call last):
File "testnnm.py", line 27, in <module>
allNodes = client.service.getNodes(filter)
File "C:\Python27\lib\site-packages\suds\client.py", line 521, in __call__
return client.invoke(args, kwargs)
File "C:\Python27\lib\site-packages\suds\client.py", line 581, in invoke
result = self.send(soapenv)
File "C:\Python27\lib\site-packages\suds\client.py", line 613, in send
reply = self.options.transport.send(request)
File "C:\Python27\lib\site-packages\suds\transport\http.py", line 239, in send
return HttpTransport.send(self, request)
File "C:\Python27\lib\site-packages\suds\transport\http.py", line 82, in send
fp = self.u2open(u2request)
File "C:\Python27\lib\site-packages\suds\transport\http.py", line 132, in u2open
return url.open(u2request, timeout=tm)
File "C:\Python27\lib\urllib2.py", line 431, in open
response = self._open(req, data)
File "C:\Python27\lib\urllib2.py", line 449, in _open
'_open', req)
File "C:\Python27\lib\urllib2.py", line 409, in _call_chain
result = func(*args)
File "C:\Python27\lib\urllib2.py", line 1227, in http_open
return self.do_open(httplib.HTTPConnection, req)
File "C:\Python27\lib\urllib2.py", line 1197, in do_open
raise URLError(err)
urllib2.URLError: <urlopen error [Errno 11004] getaddrinfo failed>
What could be causing this error?
EDIT:
Also, this script is being run from a Windows 7 environment.
I was able to get things working correctly by adding the server's IP to my hosts file and then associating it with the hostname that is hardcoded on the server
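For anyone hitting the same thing: error 11004 is a Windows name-resolution failure, so the hosts-file fix above simply gives the machine a local mapping for the name the WSDL refers to (on Windows 7 the file is C:\Windows\System32\drivers\etc\hosts). A small check to confirm whether that name resolves before and after the edit (the hostname here is a placeholder for whatever the WSDL actually references):

import socket

hostname = 'nodebean-server.example.local'  # placeholder for the name in the WSDL
try:
    print socket.gethostbyname(hostname)
except socket.gaierror as e:
    print 'name does not resolve:', e  # this is what surfaces as errno 11004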

Scraping in Python using an open API

I want to scrape "www.naver.com", so I tried scraping via the open API.
I wrote the following code:
import urllib.request
import urllib.parse
from bs4 import BeautifulSoup

defaultURL = 'http://openapi.naver.com/search?&'
key = 'key=keyvalue'
target = '&target=news'
sort = '&sort=sim'
start = '&start=1'
display = '&display=100'
query = '&query=' + urllib.parse.quote_plus(str(input("write:")))
fullURL = defaultURL + key + target + sort + start + display + query
print(fullURL)

file = open("C:\\Users\\kimty\\Desktop\\k\\python\\N\\naver_news.txt", "w", encoding='utf-8')
f = urllib.request.urlopen(fullURL)
resultXML = f.read()
xmlsoup = BeautifulSoup(resultXML, 'html.parser')
items = xmlsoup.find_all('item')
for item in items:
    file.write('---------------------------------------\n')
    file.write('title : ' + item.title.get_text(strip=True) + '\n')
    file.write('contents : ' + item.description.get_text(strip=True) + '\n')
    file.write('\n')
file.close()
but the Python shell only shows this:
============= RESTART: C:\Users\kimty\Desktop\kpython\N\N.py =============
write:lee
http://openapi.naver.com/search?&key=keyvalue&target=news&sort=sim&start=1&display=100&query=lee
Traceback (most recent call last):
File "C:\Users\kimty\Desktop\k\python\N\N.py", line 19, in <module>
f=urllib.request.urlopen(fullURL)
File "C:\Python34\lib\urllib\request.py", line 161, in urlopen
return opener.open(url, data, timeout)
File "C:\Python34\lib\urllib\request.py", line 464, in open
response = self._open(req, data)
File "C:\Python34\lib\urllib\request.py", line 482, in _open
'_open', req)
File "C:\Python34\lib\urllib\request.py", line 442, in _call_chain
result = func(*args)
File "C:\Python34\lib\urllib\request.py", line 1211, in http_open
return self.do_open(http.client.HTTPConnection, req)
File "C:\Python34\lib\urllib\request.py", line 1186, in do_open
r = h.getresponse()
File "C:\Python34\lib\http\client.py", line 1227, in getresponse
response.begin()
File "C:\Python34\lib\http\client.py", line 386, in begin
version, status, reason = self._read_status()
File "C:\Python34\lib\http\client.py", line 356, in _read_status
raise BadStatusLine(line)
http.client.BadStatusLine: ''
Why is this happening?
What is the Python shell telling me?
I am using Windows 8.1 x64, Python 3.4.4.
This http.client.BadStatusLine is a subclass of http.client.HTTPException. The server gave you an HTTP error back; maybe your API key is wrong. If I try to access the link in my browser, it also gives me an error.
This is the exact address you tried to request.
Edit
Some people have fixed this error by importing the http lib.
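A BadStatusLine with an empty string usually means the server closed the connection or replied with something that is not valid HTTP, which fits an endpoint rejecting the request (a bad key, a retired API path, or a plain-HTTP call to an HTTPS-only host). One way to see more of what is going on is to catch the different failure modes separately; a small sketch against the same fullURL (the key above is still a placeholder):

import http.client
import urllib.error
import urllib.request

try:
    with urllib.request.urlopen(fullURL) as f:
        print(f.status, f.read()[:200])
except urllib.error.HTTPError as e:
    print('HTTP error:', e.code, e.read()[:200])
except urllib.error.URLError as e:
    print('connection problem:', e.reason)
except http.client.HTTPException as e:
    print('malformed response from server:', repr(e))  # covers BadStatusLine

Whatever lands in the last two branches points at the endpoint or the network rather than the parsing code further down.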

Go to next item in list on error

I am pulling websites from a list and want to test whether they are up or down. The code below works fine as long as they are up, but as soon as something is wrong with one of the URLs, I get an error and the whole script stops.
What I want to achieve: error message == website not working, therefore print "down" and move to the next item in the list.
import urllib2
from urllib2 import Request, urlopen, HTTPError, URLError

def checkurl(z):
    user_agent = 'Mozilla/20.0.1 (compatible; MSIE 5.5; Windows NT)'
    headers = {'User-Agent': user_agent}
    link = "http://" + z
    req = Request(link, headers=headers)
    try:
        page_open = urlopen(req)
    except HTTPError, e:
        print "down"
    else:
        print 'up'
        # print urllib2.urlopen('http://' + z).read()
Traceback (most recent call last):
File "/home/user/Videos/python/onion/qweqweqweq.py", line 48, in <module>
checkurl(x)
File "/home/user/Videos/python/onion/qweqweqweq.py", line 23, in checkurl
page_open = urlopen(req)
File "/usr/lib/python2.7/urllib2.py", line 127, in urlopen
return _opener.open(url, data, timeout)
File "/usr/lib/python2.7/urllib2.py", line 401, in open
response = self._open(req, data)
File "/usr/lib/python2.7/urllib2.py", line 419, in _open
'_open', req)
File "/usr/lib/python2.7/urllib2.py", line 379, in _call_chain
result = func(*args)
File "/usr/lib/python2.7/urllib2.py", line 1211, in http_open
return self.do_open(httplib.HTTPConnection, req)
File "/usr/lib/python2.7/urllib2.py", line 1178, in do_open
h.request(req.get_method(), req.get_selector(), req.data, headers)
File "/usr/lib/python2.7/httplib.py", line 962, in request
self._send_request(method, url, body, headers)
File "/usr/lib/python2.7/httplib.py", line 996, in _send_request
self.endheaders(body)
File "/usr/lib/python2.7/httplib.py", line 958, in endheaders
self._send_output(message_body)
File "/usr/lib/python2.7/httplib.py", line 818, in _send_output
self.send(msg)
File "/usr/lib/python2.7/httplib.py", line 780, in send
self.connect()
File "/usr/lib/python2.7/httplib.py", line 761, in connect
self.timeout, self.source_address)
File "/home/user/Videos/python/onion/qweqweqweq.py", line 5, in create_connection
sock.connect(address)
File "/usr/lib/python2.7/dist-packages/socks.py", line 369, in connect
self.__negotiatesocks5(destpair[0],destpair[1])
File "/usr/lib/python2.7/dist-packages/socks.py", line 236, in __negotiatesocks5
raise Socks5Error(ord(resp[1]),_generalerrors[ord(resp[1])])
TypeError: __init__() takes exactly 2 arguments (3 given)
You are catching HTTPError, but what is thrown is Socks5Error.
You're missing Socks5Error in your except clause. Look at the traceback:
raise Socks5Error(ord(resp[1]),_generalerrors[ord(resp[1])])
Note that this wouldn't have happened if you used requests instead of urllib2: the interface is a lot clearer and the documentation is better.
In answer to "would it be possible to assume that the website is down regardless of the error": this will do it:
req = Request(link, headers=headers)
try:
    page_open = urlopen(req)
except:
    print "down"
else:
    print 'up'
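To tie that back to the goal of moving on to the next item: because the exception is now handled inside checkurl, calling it in a loop keeps going even when a site fails (the list below is made up):

sites = ['example.com', 'no-such-host.invalid', 'example.org']  # made-up list
for z in sites:
    checkurl(z)  # prints 'up' or 'down' and moves on either way

The bare except also swallows the Socks5Error from the traceback; catching specific exceptions such as URLError and Socks5Error would be tidier, but the effect is the same.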
