Latitude and Longitude to Address - python
I have about 8000 + Latitudes and Longitudes.
I need to reverse-geocode them to locations, ideally down to the city.
Initially I checked with Google Maps, but according to their terms we should not hit their services with our scripts.
Even OpenStreetMap doesn't allow us to hit their servers repeatedly; they impose a rate limit.
So, I downloaded the latitudes and longitudes for locations. I wrote a python script,
import tabular as tb
import csv
citiesLatLongData = tb.tabarray(SVfile="D:/latitude-longitude/citieslatlong.csv")
allData = tb.tabarray(SVfile="C:/Users/User/Desktop/alldata.csv")
latlonglocs = {'a1':"Car Nicobar",'a2':"Port Blair",'a3':"Hyderabad",'a4':"Kadapa",'a5':"Puttaparthi",
'a6':"Rajahmundry",'a7':"Tirupati",'a8':"Vijayawada",'a9':"Vishakhapatnam",'a10':"Itanagar",
'a11':"Dibrugarh",'a12':"Dispur",'a13':"Guwahati",'a14':"North Lakhimpur",'a15':"Silchar",
'a16':"Gaya",'a17':"Patna",'a18':"Chandigarh",'a19':"Raipur",'a20':"Silvassa",
'a21':"Daman",'a22':"Bawana",'a23':"New Delhi",'a24':"Mormugao",'a25':"Panaji",
'a26':"Ahmedabad",'a27':"Bhavnagar",'a28':"Bhuj",'a29':"Gandhinagar",'a30':"Jamnagar",
'a31':"Kandla",'a32':"Rajkot",'a33':"Vadodara",'a34':"Hisar",'a35':"Bilaspur",
'a36':"Dharamsala",'a37':"Kulu",'a38':"Shimla",'a39':"Jammu",'a40':"Srinagar",'a41':"Jamshedpur",
'a42':"Ranchi",'a43':"Bangalore",'a44':"Belgaum",'a45':"Bellary",'a46':"Hubli Dharwad",
'a47':"Mandya",'a48':"Mangalore",'a49':"Mysore",'a50':"Cochin",'a51':"Kozhikode",
'a52':"Thiruvananthapuram",'a53':"Bingaram Island ",'a54':"Kavaratti",'a55':"Bhopal",'a56':"Gwalior",
'a57':"Indore",'a58':"Jabalpur",'a59':"Khandwa",'a60':"Satna",'a61':"Ahmadnagar",
'a62':"Akola",'a63':"Aurangabad",'a64':"Jalna",'a65':"Kolhapur",'a66':"Mumbai",
'a67':"Nagpur",'a68':"Nasik",'a69':"Pimpri",'a70':"Pune",'a71':"Solapur",
'a72':"Imphal",'a73':"Shillong",'a74':"Aizawl",'a75':"Kohima",'a76':"Bhubaneswar",
'a77':"Jharsuguda",'a78':"Karaikal",'a79':"Mahe",'a80':"Pondicherry",'a81':"Yanam",
'a82':"Amritsar",'a83':"Pathankot",'a84':"Jaipur",'a85':"Jodhpur",'a86':"Kota",
'a87':"Udaipur",'a88':"Gangtok",'a89':"Chennai",'a90':"Coimbatore",'a91':"Madurai",
'a92':"Nagercoil",'a93':"Thiruchendur",'a94':"Thiruvannaamalai",'a95':"Thoothukudi",
'a96':"Tiruchirappalli",'a97':"Tirunelveli",'a98':"Vellore",'a99':"Agartala",
'a100':"Agra",'a101':"Allahabad",'a102':"Bareilly",'a103':"Gorakhpur",'a104':"Jhansi",
'a105':"Kanpur",'a106':"Lucknow",'a107':"Varanasi",'a108':"Dehradun",'a109':"Pantnagar",
'a110':"Kolkata",'a111':"Siliguri"}
latlongs = {'a1':[9.15,92.8167],'a2':[11.6667,92.7167],'a3':[17.45,78.4667],'a4':[14.4833,78.8333],
'a5':[14.1333,77.7833],'a6':[16.9667,81.7667],'a7':[13.65,79.4167],'a8':[16.5333,80.8],
'a9':[17.7,83.3],'a10':[27.0833,93.5667],'a11':[27.4833,95.0167],'a12':[26.0833,91.8333],
'a13':[26.1667,91.5833],'a14':[27.2333,94.1167],'a15':[24.8167,92.8],'a16':[24.75,84.95],
'a17':[25.6,85.1],'a18':[30.7333,76.75],'a19':[21.2333,81.6333],'a20':[20.2833,73],
'a21':[20.4167,72.85],'a22':[28.7833,77.0333],'a23':[28.5667,77.1167],'a24':[15.3833,73.8167],
'a25':[15.3833,73.8167],'a26':[23.0333,72.6167],'a27':[21.75,72.2],'a28':[23.25,69.6667],
'a29':[23.3333,72.5833],'a30':[22.4667,70.0667],'a31':[23.0333,70.2167],'a32':[22.3,70.7833],
'a33':[22.3,73.2667],'a34':[29.1667,75.7333],'a35':[31.25,76.6667],'a36':[32.2,76.4],
'a37':[31.9667,77.1],'a38':[31.1,77.1667],'a39':[32.7,74.8667],'a40':[34.0833,74.8167],
'a41':[22.8167,86.1833],'a42':[23.3167,85.3167],'a43':[12.9833,77.5833],'a44':[15.85,74.6167],
'a45':[15.15,76.85],'a46':[15.35,75.1667],'a47':[12.55,76.9],'a48':[12.9167,74.8833],
'a49':[12.3,76.65],'a50':[9.95,76.2667],'a51':[11.25,75.7667],'a52':[8.46667,76.95],
'a53':[10.9167,72.3333],'a54':[10.5833,72.65],'a55':[23.2833,77.35],'a56':[26.2333,78.2333],
'a57':[22.7167,75.8],'a58':[23.2,79.95],'a59':[21.8333,76.3667],'a60':[24.5667,80.8333],
'a61':[19.0833,74.7333],'a62':[20.7,77.0667],'a63':[19.85,75.4],'a64':[19.8333,75.8833],
'a65':[16.7,74.2333],'a66':[19.1167,72.85],'a67':[21.1,79.05],'a68':[19.8933,73.8],
'a69':[18.55,73.8167],'a70':[18.5333,73.8667],'a71':[17.6667,75.9],'a72':[24.7667,93.9],
'a73':[25.55,91.85],'a74':[23.6667,92.6667],'a75':[25.6667,94.1167],'a76':[20.25,85.8333],
'a77':[21.5833,84.08333],'a78':[10.95,79.7833],'a79':[11.7,75.5333],'a80':[11.9333,79.8833],
'a81':[16.7333,82.2167],'a82':[31.6333,74.8667],'a83':[32.2833,75.65],'a84':[26.8167,75.8],
'a85':[29.1667,75.7333],'a86':[25.15,75.85],'a87':[24.5667,73.6167],'a88':[27.3333,88.6167],
'a89':[13,80.1833],'a90':[11.0333,77.05],'a91':[9.83333,78.0833],'a92':[8.16667,77.4333],
'a93':[8.48333,78.1167],'a94':[12.2167,79.0667],'a95':[8.78333,78.1333],'a96':[10.7667,78.7167],
'a97':[8.73333,77.7],'a98':[12.9167,79.15],'a99':[23.8833,91.25],'a100':[27.15,77.9667],
'a101':[25.45,81.7333],'a102':[28.3667,79.4],'a103':[26.75,83.3667],'a104':[29.1667,75.7333],
'a105':[26.4,80.4],'a106':[26.75,80.8833],'a107':[25.45,83],'a108':[30.3167,78.0333],
'a109':[29.0833,79.5],'a110':[22.65,88.45],'a111':[26.6333,88.3167]
}
for eachOne in allData:
for eachTwo in latlongs:
eachOne_Coordinates_Latitude = eachOne['COORDINATES-Latitude']
latlongs_eachTwo_Latitude_Plus = int(latlongs[eachTwo][0]) + 0.18
latlongs_eachTwo_Latitude_Minus = int(latlongs[eachTwo][0]) - 0.18
eachOne_Coordinates_Longitude = eachOne['COORDINATES-Longitude']
latlongs_eachTwo_Longitude_Plus = int(latlongs[eachTwo][1]) + 0.18
latlongs_eachTwo_Longitude_Minus = int(latlongs[eachTwo][1]) - 0.18
if ( (eachOne_Coordinates_Latitude < latlongs_eachTwo_Latitude_Plus) and (latlongs_eachTwo_Latitude_Plus > latlongs_eachTwo_Latitude_Minus) ) and ( (eachOne_Coordinates_Longitude < latlongs_eachTwo_Longitude_Plus) and (eachOne_Coordinates_Longitude > latlongs_eachTwo_Longitude_Minus) ):
someDict.setdefault((eachOne_Coordinates_Latitude,eachOne_Coordinates_Longitude),[]).append(latlongs[eachTwo])
for each in someDict:
print each,':', min(someDict[each])
MY PROBLEM:
As you know, the latitudes and longitudes that we get from external sources do not exactly match the latitudes and longitudes that we have. I heard somewhere that they won't match and that there will be some error margin.
I need some guidance from anyone. I request someone to please point me in the right direction or if you know any packages or scripts that does this.
I would be extremely thankful to you.
This sounds a lot like a "Closest point problem". You have N points (cities) and M locations (your 8000 coordinates). For each of the M locations, you want to categorize the location by its closest city. There are a number of solutions for the Nearest Neighbor Search, but the simplest one is a linear search:
function getClosestCity(Coordinate location){
bestCity = cities[0];
foreach(city in cities){
if (distance(bestCity.location, location) < distance(city.location, location)){
bestCity = city;
}
}
return bestCity;
}
Related
Simple API Rest python script takes a lot of time (or doesn't end)
For a business process I need to calculate driving distance between 1 origin and 30 k destinations. I get both origins and destinations coordinates from a Google Sheet. Destinations is a matrix (approx 100 x 30). I'm using HERE api to calculate the distance. The result should be the same destinations matrix but with the distance (in the same order as the destinations coordinates). This is the part of the script that calculates the distance and, I think, the one which lasts a lot: distance= [] distance= pd.DataFrame(distance) for row in destinations.itertuples(): a= row[1:] distance1 = [] for column in a: try: args = {'waypoint0': 'geo!'+origins, 'waypoint1': 'geo!'+column, 'mode': 'fastest;truck'} qstr = urlencode(args) url = "https://route.ls.hereapi.com/routing/7.2/calculateroute.json?apiKey=xxxx" + qstr response = urllib.request.urlopen(url) dist = json.loads(response.read())['response']['route'][0]['leg'][0]['length']/1000 except Exception: dist = 10000 distance1.append(dist) distance2 = pd.DataFrame(distance1) distance2 = distance2.T distance = distance.append(distance2) Does anyone think of a better way to make the script to actually finish? Thanks!!
The logic looks pretty much accurate. If you need to limit the loop count, please check Large-Scale Matrix Routing API if it aligns with the use case. The Large-Scale Matrix Routing service is a HTTP JSON API for calculating routing matrices with a large number of start and destinations points (e.g. 10,000 x 10,000). For more details, please refer the following doc : https://developer.here.com/documentation/large-matrix/api-reference-swagger.html Note : please remove appKey from the shared code snippet
What's the most efficient way to fill a longitude / latitude map with circular points of specified radius?
So I'm trying to get around restrictions on a certain company's API that provides data on places of interest. The API doesn't allow me to collect results for an entire state. Instead, I must specify Lat/Lon coordinates and gather the nearby places in a circular 1 to 50000m radius. This API also only returns 60 results at a time, regardless of how many reside in the location specified. I'm aware I will encounter more than 60 places at specified locations, but I'm planning to recursively bisect and process each of these cases. I'd like to use New York State as my first test case. I'm using a shapefile from https://www.arcgis.com/home/item.html?id=b07a9393ecbd430795a6f6218443dccc to get the shape of NY, and shapely to determine whether my point is inside the border. import geopandas as gpd from shapely.geometry import Point, Polygon usa = gpd.read_file('states_21basic/states.shp') polygon = usa[usa.STATE_ABBR == 'NY']['geometry'].values[0] point = Point(-74.005974,40.712776) # create point print(polygon.contains(point)) # check if polygon contains point It's been suggested I try a flood fill algorithm, but I'm not exactly sure how to get a list of spaced out coordinates from it (to minimize API calls) and how to ensure that every part is covered, even in weird shapes like NY. My main goal is to collect all the places whilst minimizing API calls. I'm not really expecting any code, just an idea of how to tackle this. **Unfortunately, I have been removed from the company and won't be able to mark answers as accepted
Your problem might have a good solution not a perfect solution. I worked with maps for a long time and mostly all of them are complex themselves. You have another level of complexity which is an external dependency you cannot control. I had to do something similar at a lower level querying Google Maps to obtain sites on a small area (no more than 20 meters) to try to match the closest place to where I was in terms of geolocation. First, you need to make an assumption and try it. The assumption could be to only use 100 meters radius which more or less cover one block. The second problem you have is density. You can explore a map with zero places of interest for a while (New York has a lot of open areas that might not have nearly anything around). Let's suppose you can enclose your solution in a shape. Suggestion is you start from a predefined point in a dense area (Manhattan?) to maximize the algorithm findings at the beginning. Longer it runs, closer to detect less it will get. The flood fill algorithm is good in your case but it might not be the best. I would probably go with something more complex that follows streets for example but a first approach using flood fill will work. "Flood filling" is like using paint, you walk in a direction; in your case I suggest to use 8, (N, S, W, E, NW, NE, SW, SE), detect if you have "painted it" (you will need to store somewhere you've been in that position already to avoid duplicate calls, and call the API if not. Walking will open a tree of different executions, start from point A, walk N, S, E, W, NW, NE, SW, SE recursively. This recursion might be extreme in an area like New York, 100 meters each in that area will lead to several thousands level of recursion. You will require to optimize it. The third thing to keep into consideration is checking if the point is outside of the polygon. This is pretty straightforward using PNPoly. Once you are outside the polygon, walking in that direction must stop. 
I share with you the PNPoly implementation in C# I coded for a project: public bool IsCoordinateWithinPolygon(double latitude, double longitude, Polygon polygon) { if(polygon == null || polygon.Coordinates.Count() == 0) { return false; } List<double> coordinatesY = new List<double>(); List<double> coordinatesX = new List<double>(); var minLatitude = double.MaxValue; var maxLatitude = double.MinValue; var minLongitude = double.MaxValue; var maxLongitude = double.MinValue; // Quick-check, determine if the coordinate is outside of the bounding rectangle foreach(var linearRing in polygon.Coordinates) { foreach (var coordinate in linearRing.Coordinates) { coordinatesY.Add(coordinate.Latitude); coordinatesX.Add(coordinate.Longitude); if (coordinate.Latitude < minLatitude) { minLatitude = coordinate.Latitude; } if(coordinate.Latitude > maxLatitude) { maxLatitude = coordinate.Latitude; } if(coordinate.Longitude < minLongitude) { minLongitude = coordinate.Longitude; } if(coordinate.Longitude > maxLongitude) { maxLongitude = coordinate.Longitude; } } } // Determine if the coordinate is outside the bounding box if( (latitude < minLatitude || latitude > maxLatitude) && (longitude < minLongitude || longitude < maxLongitude)) { // Out of the box return false; } // PNPoly Algorithm - Point Inclusion in Polygon Test bool inclusion = false; var verty = coordinatesY.ToArray(); var vertx = coordinatesX.ToArray(); var nvert = vertx.Length; var testy = latitude; var testx = longitude; for (int i = 0, j = nvert - 1; i < nvert; j = i++) { if (((verty[i] > testy) != (verty[j] > testy)) && (testx < (vertx[j] - vertx[i]) * (testy - verty[i]) / (verty[j] - verty[i]) + vertx[i])) inclusion = !inclusion; } return inclusion; }
Decompress Bing Maps GeoData (borders) with Python
I have been trying to decompress the Bing Maps location/border shape algorithm using Python. My end goal is to have custom regions/borders created from combining multiple counties and cities, and save the location data to our database for faster and more accurate location based analysis. My strategy is as follows, but I'm a little stuck on #2, since I can't seem to accurately decompress the code: Retrieve County/City borders from Bing Maps GeoData API - They refer to it as "shape" Decompress their "shape" data to get the latitude and longitude of the border points Remove the points that have the same lat/lng as other shapes (The goal is to make one large shape of multiple counties, as opposed to 5-6 separate shapes) Compress the end result and save in the database The function I am using seems to work for the example of 'vx1vilihnM6hR7mEl2Q' provided in the Point Compression Algorithm documentation. However, when I insert something a little more complex, like Cook County, the formula seems to be off (tested by inserting several of the points into different polygon mapping/drawing applications that also use Bing Maps). It basically creates a line at the south side of Chicago that vigorously goes East and West into Indiana, without much North-South movement. Without knowing what the actual coordinates of any counties are supposed to be, I'm not sure how to figure out where I'm going wrong. Any help is greatly appreciated, even if it is a suggestion of a different strategy. 
Here is the python code (sorry for the overuse of the decimal format - my poor attempt to ensure the error wasn't a result of inadvertently losing precision): safeCharacters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_-' def decodeBingBorder(compressedData): latLng = [] pointsArray = [] point = [] lastLat = Decimal(0) lastLng = Decimal(0) # Assigns the number of of each character based on the respective index of 'safeCharacters' # numbers under 32 indicate it is the last number of the combination of the point, and a new point is begun for char in compressedData: num = Decimal(safeCharacters.index(char)) if num < 32: point.append(num) pointsArray.append(point) point = [] else: num -= Decimal(32) point.append(num) # Loops through each point to determine the lat/lng of each point for pnt in pointsArray: result = Decimal(0) # This revereses step 7 of the Point Compression Algorithm https://msdn.microsoft.com/en-us/library/jj158958.aspx for num in reversed(pnt): if result == 0: result = num else: result = result * Decimal(32) + num # This was pretty much taken from the Decompression Algorithm (not in Python format) at https://msdn.microsoft.com/en-us/library/dn306801.aspx # Determine which diaganal it's on diag = Decimal(int(round((math.sqrt(8 * result + 5) - 1) / 2))) # submtract the total number of points from lower diagonals, and get the X and Y from what's left over latY = Decimal(result - Decimal(diag * (diag + 1) / 2)) lngX = Decimal(diag - latY) # undo the sign encoding if latY % 2 == 1: latY = (latY + Decimal(1)) * Decimal(-1) if lngX % 2 == 1: lngX = (lngX + Decimal(1)) * Decimal(-1) latY /= 2 lngX /= 2 # undo the delta encoding lat = latY + lastLat lng = lngX + lastLng lastLat = lat lastLng = lng # position the decimal point lat /= Decimal(100000) lng /= Decimal(100000) # append the point to the latLng list in a string format, as opposed to the decimal format latLng.append([str(lat), str(lng)]) return latLng The compressed algorithm: 
1440iqu9vJ957r8pB_825syB6rh_gXh1-ntqB56sk2B2nq07Mwvq5f64r0m0Fni11ooE4kkvxEy4wzMuotr_DvsiqvFozvt-Lw9znxH-r5oxLv9yxCwhh7wKnk4sB8o0Rvv56D8snW5n1jBg50K4kplClkpqBpgl9F4h4X_sjMs85Ls6qQi6qvqBr188mBqk-pqIxxsx5EpsjosI-8hgIoygDigU94l_4C This is the result: [['41.46986', '-87.79031'], ['41.47033', '-87.52569'], ['41.469145', '-87.23372'], ['41.469395', '-87.03741'], ['41.41014', '-86.7114'], ['41.397545', '-86.64553'], ['41.3691', '-86.47018'], ['41.359585', '-86.41984'], ['41.353585', '-86.9637'], ['41.355725', '-87.43971'], ['41.35561', '-87.52716'], ['41.3555', '-87.55277'], ['41.354625', '-87.63504'], ['41.355635', '-87.54018'], ['41.360745', '-87.40351'], ['41.362315', '-87.29262'], ['41.36214', '-87.43194'], ['41.360915', '-87.44473'], ['41.35598', '-87.58256'], ['41.3551', '-87.59025'], ['41.35245', '-87.59828'], ['41.34782', '-87.60784'], ['41.34506', '-87.61664'], ['41.34267', '-87.6219'], ['41.34232', '-87.62643'], ['41.33809', '-87.63286'], ['41.33646', '-87.63956'], ['41.32985', '-87.65056'], ['41.33069', '-87.65596'], ['41.32965', '-87.65938'], ['41.33063', '-87.6628'], ['41.32924', '-87.66659'], ['41.32851', '-87.71306'], ['41.327105', '-87.75963'], ['41.329515', '-87.64388'], ['41.32698', '-87.73614'], ['41.32876', '-87.61933'], ['41.328275', '-87.6403'], ['41.328765', '-87.63857'], ['41.32866', '-87.63969'], ['41.32862', '-87.70802']]
As mentioned by rbrundritt, storing the data from Big Maps is against the terms of use. However, there are other sources of this same data available, such as http://nationalmap.gov/boundaries.html In the interest of solving the problem, and to store this and other coordinate data more efficiently, I solved the problem by removing the 'round' function when calculating 'diag'. This should be what replaces it: diag = int((math.sqrt(8 * result + 5) - 1) / 2) All of the 'Decimal' crap I added is not necessary, so you can remove it if you wish.
You can also do diag=int(round((sqrt(8 * number + 1)/ 2)-1/2.)) Don't forget to subtract longitude*2 from latitude to get N/E coordinates!
Maybe it will be usefull, i found bug in code. invert pair function should be diag = math.floor((math.sqrt(8 * result + 1) - 1) / 2) after fixing this, your implementation work correct
You can't store the boundary data from the Bing Maps GeoData API or any data derived from it in a database. This is against the terms of use of the platform.
GeoDjango + PostGIS calculates wrong Distances
I just installed PostGIS with GeoDjango. Everything worked fine, but now I have a problem and cant find out the reason for this. I have model like this: from django.contrib.gis.db import models class Shop(models.Model): name = models.CharField(max_length=80) point = models.PointField(null=True, blank=True) objects = models.GeoManager() And I set its point to this position (49.794254,9.927489). Then i create a point like this: pnt = fromstr('POINT(50.084068 8.238381)') The distance between this points should be about ~ 125 km, but when i do this: results = Shop.objects.distance(pnt) print results[0].distance.km I'm getting always about 60 km too much in my result, so it returns 190 km! My SRIDs of both points are 4326... probably something wrong with that? And maybe another interesting fact, when i do this: pnt.distance(shop.point) it returns 1.713790... as a result. What am I doing wrong? Any alternatives for me to use with python + django? If there is a better solution I would not need to use PostGIS. Hope you can help me! Chris
I just ran this query in postgis : select round(CAST(ST_Distance_Sphere(ST_GeomFromText('POINT(49.794254 9.927489)',4326), ST_GeomFromText('POINT(50.084068 8.238381)',4326)) As numeric)/1000.0,2) as distance_km; distance_km ------------- 190.50 the result is in fact 190.50, so it seems there's nothing wrong with your 190 km result same result with this awesome page, there is a brief explanation of how to calculate this distances. the 1.713790... result seems to be in the same units of the srid, or in other words that number is not in meters. EDIT Ooohh I just saw your problem you misplaced the lat and lon, in the WKT format, Longitude comes first so the real query should be: select round(CAST(ST_Distance_Sphere(ST_GeomFromText('POINT(9.927489 49.794254)',4326), ST_GeomFromText('POINT(8.238381 50.084068)',4326)) As numeric)/1000.0,2) as distance_km; distance_km ------------- 125.10 so the points should be created like this POINT(9.927489 49.794254) POINT(8.238381 50.084068)
Store 3 nearest coordinates
I have an XML file that contains a number of points with their longitude and latitude. My python code at the moment gets the nearest point by simply looping through the XML file, finding the nearest, in miles or whatever, then comparing it with the previous closest point. If its nearer then I assign the variable the value of this new point. So everything is working in that regard. Now, what I want to do is actually store the closest 2 or 3 points. How do I go about doing this? The XML file isn't ordered by closest, and besides, the users location will change each time a request is made. Can I do this with an XML file or will I perhaps have to look into storing the data is SQL Server or MySQL? Thanks for the help. PS, the sample code is available here if anyone is interested. This is part of a college project.
You should store in a list of tuples (for example) all the point pairs and their distances as you parse the xml file. mypoints = [(distance12, x1, x2),...,(distancenm, xn, xm)] mypoints.sort() three_closer = mypoints[:3] Adapting this to your code: .............. mypoints = [] for row in rows: # Get coords for current record curr_coords = row.getAttribute("lat") + ',' + row.getAttribute("lng") # Get distance tempDistance = distance.distance(user_coords, curr_coords).miles mypoints.append((tempDistance, row)) mypoints.sort() #the three closest points: mythree_shorter = mypoints[0:3] for distance, row in mythree_shorter: shortestStation = json.dumps( {'number': row.getAttribute("number"), 'address': row.getAttribute("address"), 'lat': row.getAttribute("lat"), 'lng': row.getAttribute("lng"), 'open': row.getAttribute("open")}, sort_keys=True, indent=4) save_in_some_way(shortestStation) #maybe writing to a file? .................. (Note: if two distances tie, sort() will fall back to comparing the row objects themselves, which may raise; appending an index as a tie-breaker — `(tempDistance, i, row)` — avoids that.)
Here's a solution that will work for any number of points: closest = points[:NUM_CLOSEST] closest.sort() for point in points[NUM_CLOSEST:]: if point.distance < closest[-1].distance: closest[-1] = point closest.sort() Obviously, a bit pseudo-cody. The sort() calls will probably need an argument so they are sorted in a useful way, and you'll probably want a function to calculate the distance to replace the distance member.