Finding nearby cities using Google API - python

I want to get nearby cities from passed latitude and longitude. I have used the geonames and geobytes APIs but want to use Google API for finding nearby cities.
This is my code:
def getNearbyCities(self):
    """Fetch cities near ``self.city`` from the geobytes GetNearbyCities service.

    Returns a list of (city_name, distance) tuples, de-duplicated by city name.
    NOTE(review): the endpoint returns JSONP (``?( ... );``); the wrapper is
    stripped before parsing.  Assumes index 1 of each row is the city name and
    index 7 the distance -- confirm against the geobytes response format.
    """
    res_url = urlopen('http://getnearbycities.geobytes.com/GetNearbyCities?callback=?&radius=100&locationcode=' + str(self.city))
    resp = str(res_url.read())
    print(resp)
    # Strip the JSONP callback wrapper so the payload parses as a Python literal.
    validate_res = resp.split("b'?(")[-1].split(");'")[0]
    validated_res = ast.literal_eval(validate_res)
    cities_nd_distence = []
    # BUG FIX: the original tested `data[1] not in cities_nd_distence`, but that
    # list holds (name, distance) tuples, so a bare name never matched and
    # duplicates were never filtered.  Track seen names separately.
    seen_names = set()
    for data in validated_res:
        if data[1] not in seen_names:
            seen_names.add(data[1])
            cities_nd_distence.append((data[1], data[7]))
    import pprint
    pprint.pprint(cities_nd_distence)
    return cities_nd_distence

If you only want to get cities based on latitude and longitude, you can have a look at https://github.com/coderholic/django-cities
# django-cities example: find the city nearest to a given point.
from cities.models import City
from django.contrib.gis.geos import Point
from django.contrib.gis.db.models.functions import Distance
# Point takes (longitude, latitude); srid=4326 selects WGS84 lat/lon coordinates.
p = Point(-118, 34, srid=4326)
# Annotate every city with its distance to p, sort ascending, take the closest.
City.objects.annotate(distance=Distance('location', p)).order_by("distance").first()
<City: Hacienda Heights>

Related

How to save the results of a function as a new CSV?

The code is required to take addresses from a csv file and then use a function to compute the corresponding Latitudes and Longitudes. While I get the correct Latitudes and Longitudes but I am unable to save them to a new csv file.
import requests
import urllib.parse
import pandas as pd
#function to get the Coordinates:
#function to get the Coordinates:
def lat_long(add):
    """Geocode the address *add* via Nominatim and return (lat, lon) strings.

    BUG FIX: the original printed the coordinates but returned None, so
    callers could not collect or save the results.
    """
    url = 'https://nominatim.openstreetmap.org/search/'+urllib.parse.quote(add)+'?format=json'
    response = requests.get(url).json()
    print(response[0]["lat"], response[0]["lon"])
    return response[0]["lat"], response[0]["lon"]
#function is called to get the 5 Address Values from the CSV File and pass on to the function
df = pd.read_csv('C:\\Users\\Umer Abbas\\Desktop\\lat_long.csv')
print("Latitude","","Longitude")
# Use the public .at accessor instead of the private DataFrame._get_value API;
# the redundant `i = 0` before the loop is dropped.
for i in range(0, 5):
    add = df.at[i, 'Address']
    lat_long(add)
Output is:
Latitude Longitude
34.0096961 71.8990106
34.0123846 71.5787458
33.6038766 73.048136
33.6938118 73.0651511
24.8546842 67.0207055
I want to save this output into a new file and I am unable to get the results.
Just a small modification might help
def lat_long(add):
    """Look up *add* with Nominatim; print and return its (latitude, longitude)."""
    url = 'https://nominatim.openstreetmap.org/search/'+urllib.parse.quote(add)+'?format=json'
    response = requests.get(url).json()
    print(response[0]["lat"], response[0]["lon"])
    # Coordinates of the best (first) match, returned as strings.
    Lat, Long = response[0]["lat"], response[0]["lon"]
    return Lat, Long
Lat_List = []
Long_List = []
df = pd.read_csv('C:\\Users\\Umer Abbas\\Desktop\\lat_long.csv')
print("Latitude","","Longitude")
for i in range(0, 5):
    add = df.at[i, 'Address']  # public accessor instead of private _get_value
    # BUG FIX: call lat_long once per address; the original called it twice
    # (two HTTP requests per row) just to unpack lat and long separately.
    Lat, Long = lat_long(add)
    Lat_List.append(Lat)
    Long_List.append(Long)
# BUG FIX: the original had unbalanced quotes in the column list and the
# output filename (both syntax errors) and referenced an undefined `data`.
df1 = pd.DataFrame({'Latitude': Lat_List, 'Longitude': Long_List})
df1.to_csv("LatLong.csv")
#one line of change here
def lat_long(add):
    """Geocode *add* via Nominatim; print and return (lat, lon) as strings."""
    url = 'https://nominatim.openstreetmap.org/search/'+urllib.parse.quote(add)+'?format=json'
    best = requests.get(url).json()[0]  # first result is the best match
    print(best["lat"], best["lon"])
    return best["lat"], best["lon"]  # return the lat and long
# three lines added here
df = pd.read_csv('C:\\Users\\Umer Abbas\\Desktop\\lat_long.csv')
l = []  # collects one (lat, lon) tuple per address
print("Latitude","","Longitude")
for i in range(0, 5):
    add = df.at[i, 'Address']  # public accessor instead of private _get_value
    l.append(lat_long(add))  # append to the empty l
# create a dataframe and output as csv
# BUG FIX: lat_long returns (lat, lon), so the columns must be labelled in
# that order; the original used ['Longitude', 'Latitude'], swapping them.
pd.DataFrame(l, columns=['Latitude', 'Longitude']).to_csv('test.csv', sep=' ')

Retrieving data from the Air Quality Index (AQI) website through the API and only receiving a small number of stations

I'm working on a personal project and I'm trying to retrieve air quality data from the https://aqicn.org website using their API.
I've used this code, which I've copied and adapted for the city of Bucharest as follows:
import pandas as pd
import folium
import requests
# GET data from AQI website through the API
base_url = "https://api.waqi.info"
path_to_file = "~/path"
# Got token from:- https://aqicn.org/data-platform/token/#/
with open(path_to_file) as f:
    # BUG FIX: readlines()[0] keeps the trailing newline, so the token sent
    # to the API was "XXX\n".  A malformed token can make the service return
    # only a reduced/demo set of stations -- strip it before use.
    key = f.readline().strip()
# (lat, long)-> bottom left, (lat, lon)-> top right
latlngbox = "44.300264,25.920181,44.566991,26.297836" # For Bucharest
trail_url = f"/map/bounds/?token={key}&latlng={latlngbox}"
my_data = pd.read_json(base_url + trail_url) # Joined parts of URL
print('columns->', my_data.columns) # 2 cols: 'status' and 'data' JSON
### Build a dataframe from the json payload: one row per station
all_rows = []
for each_row in my_data['data']:
    all_rows.append([each_row['station']['name'],
                     each_row['lat'],
                     each_row['lon'],
                     each_row['aqi']])
df = pd.DataFrame(all_rows, columns=['station_name', 'lat', 'lon', 'aqi'])
# Clean the DataFrame
df['aqi'] = pd.to_numeric(df.aqi, errors='coerce') # Invalid parsing to NaN
# Remove NaN entries in col
df1 = df.dropna(subset=['aqi'])
Unfortunately it only retrieves 4 stations whereas there are many more available on the actual site. In the API documentation the only limitation I saw was for "1,000 (one thousand) requests per second" so why can't I get more of them?
Also, I've tried to modify the lat-long values and managed to get more stations, but they were outside the city I was interested in.
Here is a view of the actual perimeter I've used in the embedded code.
If you have any suggestions as of how I can solve this issue, I'd be very happy to read your thoughts. Thank you!
Try using waqi through aqicn... not exactly a clean API but I found it to work quite well
import pandas as pd
url1 = 'https://api.waqi.info'
# Get token from:- https://aqicn.org/data-platform/token/#/
token = 'XXX'
box = '113.805332,22.148942,114.434299,22.561716' # polygon around HongKong via bboxfinder.com
url2=f'/map/bounds/?latlng={box}&token={token}'
my_data = pd.read_json(url1 + url2)
# One row per station: [name, lat, lon, aqi].
all_rows = [[station['station']['name'], station['lat'], station['lon'], station['aqi']]
            for station in my_data['data']]
df = pd.DataFrame(all_rows, columns=['station_name', 'lat', 'lon', 'aqi'])
From there it's easy to plot:
# FIX: HeatMap was used below without ever being imported.
from folium.plugins import HeatMap
df['aqi'] = pd.to_numeric(df.aqi, errors='coerce')  # invalid readings -> NaN
print('with NaN->', df.shape)
df1 = df.dropna(subset=['aqi'])
df2 = df1[['lat', 'lon', 'aqi']]  # columns HeatMap expects: lat, lon, weight
init_loc = [22.396428, 114.109497]  # map centre
max_aqi = int(df1['aqi'].max())
print('max_aqi->', max_aqi)
m = folium.Map(location=init_loc, zoom_start=5)
# NOTE(review): `max_val` was renamed in newer folium releases -- confirm the
# installed version's HeatMap signature.
heat_aqi = HeatMap(df2, min_opacity=0.1, max_val=max_aqi,
                   radius=60, blur=20, max_zoom=2)
m.add_child(heat_aqi)
m
Or as such
# Alternative view: one colour-coded marker per station instead of a heatmap.
centre_point = [22.396428, 114.109497]
m2 = folium.Map(location=centre_point, tiles='Stamen Terrain', zoom_start=6)
for idx, row in df1.iterrows():
    popup_text = row['station_name'] + ' AQI=' + str(row['aqi'])
    aqi_value = row['aqi']
    # Colour thresholds: >300 red, >200 orange, otherwise green.
    if aqi_value > 300:
        marker_colour = 'red'
    elif aqi_value > 200:
        marker_colour = 'orange'
    else:
        marker_colour = 'green'
    folium.Marker(location=[row['lat'], row['lon']],
                  popup=popup_text,
                  icon=folium.Icon(color=marker_colour)).add_to(m2)
m2
Checking for stations within Hong Kong returns 19:
# Rows whose station name contains the substring 'HongKong'.
df[df['station_name'].str.contains('HongKong')]

Get time from city name using Python

As you can see in the title, I want to find the current time for a given city in Python. How can I achieve this? I've tried the geopy and timezonefinder modules, but they give me inconsistent results (for example, asking "What time is it in Spotify?" still produces an answer like "It's 12:04").
What I'm trying to achieve is:
What time is it in California?
It's 16:15
THE CODE
import nltk
import datetime
import calendar
import pytz
from geopy.geocoders import Nominatim
from timezonefinder import TimezoneFinder

# BUG FIX: `self` only exists inside a class; using it at module level raises
# NameError.  A plain local variable is used instead.
inp = input("City name: ")
# Find city name using NLP
findCityName = str(inp.title())
# NLP: keep only the named-entity chunks (the city name) from the input.
word = nltk.word_tokenize(findCityName)
pos_tag = nltk.pos_tag(word)
chunk = nltk.ne_chunk(pos_tag)
entities = [" ".join(w for w, t in ele) for ele in chunk if isinstance(ele, nltk.Tree)]
inp = ' '.join(entities)
# Get lat, long from city name
geolocator = Nominatim(user_agent='xxx')
location = geolocator.geocode(inp.capitalize())
# Get timezone name (e.g. 'America/Los_Angeles') from the coordinates
tf = TimezoneFinder()
latitude, longitude = location.latitude, location.longitude
datez = str(tf.timezone_at(lng=longitude, lat=latitude))
globalDate = datetime.datetime.now(pytz.timezone(datez))
# BUG FIX: the goal is the local *time* ("It's 16:15"), but the original
# format string only printed the date -- include %H:%M as well.
print("The time in " + inp + " is: " + globalDate.strftime('%A, %m/%d/%y %H:%M'))

Use variable from function that takes argument in another function

So I'm making this weather app. Since the weather API takes only latitude and longitude (as far as I know), I'm using another API to get them by city name. GetGeo takes a city name and returns latitude and longitude, which I then need to use in the GetWeather function. The problem is I don't know how to make them accessible in that function. The only way I found is `lat, lon = GetGeo()`, but this isn't working since GetGeo takes an argument, which I pass in from an input field. I understand this probably isn't the best approach — I'm a beginner and already made this work without separate functions, but I like to challenge myself and want to split everything into functions :)
def GetGeo(cityName):
    """Resolve *cityName* to (lat, lon) strings via the LocationIQ search API."""
    locationKey = 'xxx'
    locationUrl = "https://eu1.locationiq.com/v1/search.php?key=" + locationKey + '&q=' + cityName + '&format=json'
    locationDataReq = requests.get(locationUrl)
    # First entry is the best match; coordinates arrive as strings.
    best = locationDataReq.json()[0]
    return best['lat'], best['lon']
def GetWeather(cityName):
    """Fetch the Dark Sky forecast for *cityName* and show it in `label`.

    BUG FIX: GetGeo requires a city name, but the original called GetGeo()
    with no argument, which raises TypeError.  The city is now taken as a
    parameter and passed straight through, e.g.
    GetWeather(input('please type city: ')).
    """
    lat, lon = GetGeo(cityName)
    darkSkyKey = 'xxx'
    darkSkyUrl = "https://api.darksky.net/forecast/" + darkSkyKey + "/" + lat + "," + lon + "?units=si"
    darkSkyDataReq = requests.get(darkSkyUrl)
    darkSkyData = darkSkyDataReq.json()
    label["text"] = darkSkyData  # `label` is expected to be a Tk widget defined elsewhere
    print(darkSkyData)
How about:
def GetGeo(cityName):
    """Return the (lat, lon) of *cityName* using the LocationIQ search API."""
    locationKey = 'xxx'
    locationUrl = "https://eu1.locationiq.com/v1/search.php?key=" + locationKey + '&q=' + cityName + '&format=json'
    payload = requests.get(locationUrl).json()
    first_match = payload[0]  # best match comes first
    lat, lon = first_match['lat'], first_match['lon']
    return lat, lon
def GetWeather(cityName):
    """Geocode *cityName*, then fetch and display its Dark Sky forecast."""
    lat, lon = GetGeo(cityName)
    darkSkyKey = 'xxx'
    darkSkyUrl = "https://api.darksky.net/forecast/" + darkSkyKey + "/" + lat + "," + lon + "?units=si"
    forecast = requests.get(darkSkyUrl).json()
    label["text"] = forecast  # `label` is a Tk widget defined elsewhere
    print(forecast)

qCity = input('please type city: ')
GetWeather(qCity)

Obtain elevation from latitude longitude coordinates with a simple python script

I have a python script that I got from this question that will pull from the USGS Elevation Point Query Service. However, It keeps timing out and kicks me out after a seemingly random amount of time and before my query finishes. I need another method to pull elevation data given lat lon coordinates.
Here is my current query:
# ========= pull elev from usgs server ======
# USGS POINT QUERY SERVICE ==================
url = r'https://nationalmap.gov/epqs/pqs.php?'
# ===========================================
# Coordinates with known elevation, paired index-wise.
lat = [48.633, 48.733, 45.1947, 45.1962]
lon = [-93.9667, -94.6167, -93.3257, -93.2755]
# Build the input frame one column at a time.
df = pd.DataFrame()
df['lat'] = lat
df['lon'] = lon
def elevation_function(df, lat_column, long_column):
    """Query the USGS point service for each (lat, lon) row of *df*.

    Prints progress and elapsed seconds after every request, stores the
    results in df['elev'], and returns the list of elevations.
    """
    total = df.shape[0]
    started = time.time()
    elevations = []
    for done, (lat, lon) in enumerate(zip(df[lat_column], df[long_column]), start=1):
        # REST query parameters for a single point.
        params = {
            'output': 'json',
            'x': lon,
            'y': lat,
            'units': 'Meters'
        }
        result = requests.get(url + urllib.parse.urlencode(params))
        payload = result.json()
        elevations.append(payload['USGS_Elevation_Point_Query_Service']['Elevation_Query']['Elevation'])
        print('Proportion of job complete: {}'.format(round(done / total, 3)))
        print(str(round(time.time() - started)) + " seconds into job\n")
    df['elev'] = elevations
    return elevations
# Stress test: run the full query batch 100 times and report total runtime.
start = time.time()
count = 0
for _ in range(100):
    count += 1
    elevations = elevation_function(df, lat_column='lat', long_column='lon')
print(str(round(time.time() - start)))
Streamline the function and add error handling:
elevation_function needs to be written to work with pandas.DataFrame.apply
Using apply, with axis=1, automatically iterates through each row of coordinates
New Functions:
make_remote_request will continue to make the request until it gets response.
Change the exception to fit the exception returned by the server (e.g. except (OSError, urllib3.exceptions.ProtocolError) as error)
Optionally, import time and add time.sleep(5) before continue in the exception, to play nice with the remote server.
def make_remote_request(url: str, params: dict):
    """Make the remote GET request, retrying until it succeeds.

    Returns the ``requests.Response``.  FIX: the original annotated the
    return type as ``-> json``, but ``json`` is a module, not a type.

    NOTE(review): connection errors are retried forever with no back-off;
    consider a time.sleep before ``continue`` to play nice with the server.
    """
    count = 1
    while True:
        try:
            response = requests.get((url + urllib.parse.urlencode(params)))
        except (OSError, urllib3.exceptions.ProtocolError) as error:
            print('\n')
            print('*' * 20, 'Error Occured', '*' * 20)
            print(f'Number of tries: {count}')
            print(f'URL: {url}')
            print(error)
            print('\n')
            count += 1
            continue
        break
    return response
def eleveation_function(x):
    """Return the USGS elevation (Meters) for a row x = (lat, lon).

    Written for ``df.apply(..., axis=1)``: x[0] is the latitude and x[1] the
    longitude.  (The misspelled name is kept so existing callers still work.)
    """
    url = 'https://nationalmap.gov/epqs/pqs.php?'
    query = {'x': x[1],
             'y': x[0],
             'units': 'Meters',
             'output': 'json'}
    response = make_remote_request(url, query)
    body = response.json()
    return body['USGS_Elevation_Point_Query_Service']['Elevation_Query']['Elevation']
Implement the function
import requests
import urllib
import urllib3
import pandas as pd
# coordinates with known elevation
lat = [48.633, 48.733, 45.1947, 45.1962]
lon = [-93.9667, -94.6167, -93.3257, -93.2755]
# create df: one row per (lat, lon) pair
df = pd.DataFrame(list(zip(lat, lon)), columns=['lat', 'lon'])
lat lon
48.6330 -93.9667
48.7330 -94.6167
45.1947 -93.3257
45.1962 -93.2755
# apply the function
# axis=1 feeds each row to eleveation_function as x = (lat, lon)
df['elevations'] = df.apply(eleveation_function, axis=1)
lat lon elevations
48.6330 -93.9667 341.14
48.7330 -94.6167 328.80
45.1947 -93.3257 262.68
45.1962 -93.2755 272.64
Can also pass in params with the following:
# Equivalent request letting `requests` build the query string from a dict
# via the params= keyword (note: units set to 'Feet' in this variant).
PARAMS = {'x':x[1], 'y':x[0], 'units':'Feet', 'output':'json'}
r = requests.get(url = URL, params = PARAMS)

Categories