How to show a company CI icon using MarkerCluster in Folium - python

# Build a clustered folium map of store locations loaded from an Excel sheet,
# with Seoul district boundaries overlaid as GeoJSON.
from folium.plugins import MarkerCluster
import folium
from folium.features import CustomIcon
import pandas as pd
import folium.plugins as plug
import json

df = pd.read_excel('store.xlsx')
# .copy() avoids pandas' SettingWithCopyWarning on the rename below.
df1 = df[['대리점명', 'Latitude', 'Longitude']].copy()
df1.rename(columns={'대리점명': 'Store'}, inplace=True)
df1.head()

# Keep only rows with valid (non-zero) coordinates.
# BUG FIX: the original filter read from `df` instead of `df1`.
x = []
y = []
name = []
for row in df1.itertuples(index=False):
    if row.Latitude == 0.0 or row.Longitude == 0.0:
        continue  # skip stores with missing coordinates
    name.append(row.Store)
    x.append(row.Latitude)
    y.append(row.Longitude)
print('store_sum: ', len(name))
print('x_sum: ', len(x))
print('y_sum: ', len(y))

# Base map centred on Seoul, with a marker-cluster layer.
map_store = folium.Map(location=[37.58, 127.0], zoom_start=11.5)
marker_cluster = plug.MarkerCluster().add_to(map_store)

file_name = r'seoul_municipalities_geo.json'
file_name = file_name.replace('\\', '/')
with open(file_name, 'rt') as f:
    geo = json.load(f)  # `with` closes the file; no explicit close needed
folium.GeoJson(geo, name='Store').add_to(map_store)

# One marker per valid store, added to the cluster (not the bare map).
for store, lat, lon in zip(name, x, y):
    folium.Marker([lat, lon], popup=store,
                  icon=folium.Icon(color='purple', icon='ok-circle')).add_to(marker_cluster)
map_store
# Second attempt: use the company CI image (CustomIcon) for every marker.
sub_df = df
latitude = 37.58
longitude = 127.0
mm = folium.Map(
    location=[latitude, longitude],
    zoom_start=11.5,
)
coords = sub_df[['Latitude', 'Longitude']]
marker_cluster = MarkerCluster().add_to(mm)
icon_path = r"hci.png"  # company CI image; loop-invariant, so hoisted
for lat, long in zip(coords['Latitude'], coords['Longitude']):
    # A fresh CustomIcon per marker — folium icon objects cannot be shared
    # between markers.
    icon = CustomIcon(
        icon_image=icon_path,
        icon_size=(180, 80),
        icon_anchor=(50, 50),
    )
    marker = folium.Marker(location=[lat, long], icon=icon, popup="대리점명")
    # BUG FIX: add each marker to the cluster, not the bare map — otherwise
    # the MarkerCluster stays empty and no clustering happens.
    marker_cluster.add_child(marker)
mm
I have tried many times, but I really cannot get it to work.
If my meaning is unclear because of my poor English, or my question is hard to understand,
please look at the URL below:
https://towardsdatascience.com/visualizing-tesla-superchargers-in-france-8c10894ab3c
This is a perfect example of what I want to mimic.
In short: in the first image, I want to change the purple check-mark marker to the company CI (the CI image is in the second picture).

The point is that each marker must be set to belong to a marker cluster. Latitude and longitude and store name are created randomly. Also, the logo is a stack overflow logo.
# Answer setup: latitude/longitude and store names are generated randomly.
import random
import string

import numpy as np
import pandas as pd
import folium
from folium.plugins import MarkerCluster
from folium.features import CustomIcon

# BUG FIX: `random` was never imported and `name_list` was never defined in
# the original snippet (NameError). Generate 100 random 10-char store names.
name_list = [''.join(random.choices(string.ascii_letters + string.digits, k=10))
             for _ in range(100)]
df = pd.DataFrame({'Store': name_list,
                   'Latitude': [random.uniform(36.0, 38.0) for _ in range(100)],
                   'Longitude': [random.uniform(126.0, 128.0) for _ in range(100)]})
df.head()
Store Latitude Longitude
0 ocUReOT56a 36.164013 127.045411
1 KbAQtbB5eG 36.534577 127.515191
2 PdiCOLvjC4 36.715178 126.333321
3 eM33oRtVii 37.889212 126.589194
4 nporJ7t4mY 36.604549 127.563762
# Cluster map of the random stores, one Stack Overflow logo icon per marker.
# The key point: every marker is added to the MarkerCluster, not the map.
center = [37.58, 127.0]
mm = folium.Map(location=center, zoom_start=10)
marker_cluster = MarkerCluster().add_to(mm)
logo_path = r"./data/240px-Stack_Overflow_icon.svg.png"
for store in df.itertuples():
    folium.Marker(
        location=[store.Latitude, store.Longitude],
        # CustomIcon instances cannot be reused across markers; build one per row.
        icon=CustomIcon(icon_image=logo_path, icon_size=(50, 50)),
        popup=store.Store,
    ).add_to(marker_cluster)
mm

Related

Plotly scatter_mapbox python: How to update the displayed scatter_mapbox

I want to plot OpenStreetMap points in a scatter_mapbox. Plotting the data is no problem,
but when I try to update the data, the new data is not shown in the plot.
When I call the .show() function again, the new result is displayed in a new Firefox tab. How can I make it update in the same window instead?
I want to create dropdown menus and input boxes for the customer, so that when they change the data, the result is shown in the same window.
Improvements to the rest of the code are also welcome.
`
import pandas as pd
import matplotlib.pyplot as plt
import plotly.express as px
import plotly.graph_objects as go
from osm.osm_reader import OSMHandler, calcDistanceBetweenTwoGPS

# Parse the OSM extract into ways / nodes / traffic-signal nodes.
osmhandler = OSMHandler()
osmhandler.apply_file("data/osm/hamburg.osm")

data_colnames_ways = ['type', 'id', 'nodeId', 'version', 'visible', 'name',
                      'ts', 'uid', 'user', 'chgset']
data_colnames_nodes = ['type', 'id', 'version', 'visible', 'ts', 'uid',
                       'user', 'chgset', 'lat', 'lon']

df_osm_ways = pd.DataFrame(osmhandler.osm_ways, columns=data_colnames_ways)
df_osm_ways = df_osm_ways.sort_values(by=['type', 'id', 'ts'])
df_osm_nodes = pd.DataFrame(osmhandler.osm_nodes, columns=data_colnames_nodes)
df_osm_nodes = df_osm_nodes.sort_values(by=['type', 'id', 'ts'])
df_traffic_nodes = pd.DataFrame(osmhandler.traffic_nodes, columns=data_colnames_nodes)
df_traffic_nodes = df_traffic_nodes.sort_values(by=['type', 'id', 'ts'])

# Accident data restricted to the bounding box of the OSM nodes.
from accident_atlas.csv_reader import CsvReader
csv_reader = CsvReader()
csv_reader.set_data_path("data/aatlas/Unfallorte2021_LinRef.csv")
accident_data = csv_reader.get_data(
    lat_limits=(min(df_osm_nodes["lat"]), max(df_osm_nodes["lat"])),
    lon_limits=(min(df_osm_nodes["lon"]), max(df_osm_nodes["lon"])))
accident_data["CloseToNode"] = False

# Count, per traffic node, the accidents within `max_distance` metres,
# and flag each accident that is close to at least one node.
max_distance = 50
df_traffic_nodes["CloseAccidents"] = 0
for idx, row_x in accident_data.iterrows():
    for idy, row_y in df_traffic_nodes.iterrows():
        if max_distance > calcDistanceBetweenTwoGPS(row_x["YGCSWGS84"], row_y["lat"],
                                                    row_x["XGCSWGS84"], row_y["lon"]):
            df_traffic_nodes.loc[idy, "CloseAccidents"] += 1
            if not accident_data["CloseToNode"][idx]:
                accident_data.loc[idx, "CloseToNode"] = True

df_acdata_filtered = accident_data[accident_data["CloseToNode"] == True]
df_traffic_nodes_filtered = df_traffic_nodes[df_traffic_nodes["CloseAccidents"] >= 0]

# Traffic lights coloured by accident count, accidents as black dots on top.
fig = px.scatter_mapbox(data_frame=df_traffic_nodes_filtered,
                        lat=df_traffic_nodes_filtered["lat"],
                        lon=df_traffic_nodes_filtered["lon"],
                        color="CloseAccidents", zoom=12, height=800, size_max=50,
                        hover_name="CloseAccidents",
                        color_continuous_scale="bluered")
fig2 = px.scatter_mapbox(data_frame=None, lat=df_acdata_filtered["YGCSWGS84"],
                         lon=df_acdata_filtered["XGCSWGS84"],
                         color_discrete_sequence=["Black"], zoom=12, height=800,
                         size_max=50)
fig.add_trace(fig2.data[0])
fig.update_layout(title = "Traffic lights on roads with number of accidents.")
fig.update_layout(mapbox_style="open-street-map")

f = go.FigureWidget(fig)
f.show()
f.data[0]["legendgroup"] = "test"
f.data[1]["legendgroup"] = "test2"
fig.update_traces(lat=fig.data[1].lat[-1], lon=fig.data[1].lon[-1],
                  selector=dict(legendgroup='test'))
print("Test")
`
I tried to plot data to a scatter_mapbox and change the data after the show.

Geopandas: different .sjoin() results with different projections systems

I tried to run a spatial join between a list of assets and a river basin dataset that you can find at the link below
https://datasets.wri.org/dataset/aqueduct-global-flood-risk-maps?msclkid=630fc948b63611ec9931936b22cf4990
The first approach was a join on an ESPG 4326 projection setting and it works fine.
# First approach: plain point-in-polygon join in EPSG:4326 — works fine.
rfd = r"C:\Users\~\aqueduct_global_flood_risk_data_by_river_basin_20150304.shp"
wri_rfr = gpd.read_file(rfd, crs='epsg:4326')

test = ['Unit 1', 'Unit 2' ]
test_lat = ['0.176095', '-24.193790']
test_lon = ['117.495523', '150.370650']
df = pd.DataFrame({'Name': test, 'Latitude': test_lat, 'Longitude': test_lon})

gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df['Longitude'], df['Latitude']))
gdf = gdf.set_crs('epsg:4326')
joined = gpd.sjoin(gdf, wri_rfr, how='inner')
len(joined)
The two assets have both a join.
In a second approach, I try to create a 500 m buffer around my assets using a metre-based projection system (EPSG:3006) and proceed to merge them... but it returns no result?
# Second approach: buffer by 500 in EPSG:3006, then join — returns nothing.
# NOTE(review): {'init': ...} is the legacy pyproj CRS syntax; kept as-is to
# preserve the original behaviour being discussed.
rfd = r"C:\Users\~\aqueduct_global_flood_risk_data_by_river_basin_20150304.shp"
wri_rfr = gpd.read_file(rfd, crs='epsg:4326')

test = ['Unit 1', 'Unit 2' ]
test_lat = ['0.176095', '-24.193790']
test_lon = ['117.495523', '150.370650']
df = pd.DataFrame({'Name': test, 'Latitude': test_lat, 'Longitude': test_lon})

gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df['Longitude'], df['Latitude']))
gdf = gdf.set_crs('epsg:4326')
gdf = gdf.to_crs({'init': 'epsg:3006'})
gdf.geometry = gdf.geometry.buffer(500)
gdf = gdf.loc[gdf.is_valid]

wri_rfr_3006 = wri_rfr.to_crs({'init': 'epsg:3006'})
wri_rfr_3006 = wri_rfr_3006.loc[wri_rfr_3006.is_valid]
joined = gpd.sjoin(gdf, wri_rfr_3006 , how='inner')
len(joined)
it returns no joins.
What am I missing here? Why would be the results different?
I have coded up the data sourcing of the shape files.
Take a look at the documentation https://epsg.io/3006 — this CRS is for Sweden, so locations in Borneo and Australia will accumulate rounding errors when expressed in metres from Sweden.
I have taken the approach of working out the UTM CRS of each point, buffering it, then converting back to epsg:4326.
With the buffered point geometry you can now do the spatial join, since an inappropriate CRS for global geometry has not been used.
test = ["Unit 1", "Unit 2"]
test_lat = ["0.176095", "-24.193790"]
test_lon = ["117.495523", "150.370650"]
df = pd.DataFrame({"Name": test, "Latitude": test_lat, "Longitude": test_lon})
gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df["Longitude"], df["Latitude"]))
gdf = gdf.set_crs("epsg:4326")


def buffer_meter(g, crs="epsg:6666", buffer=50):
    """Buffer a single geometry by `buffer` metres in its estimated UTM CRS,
    returning the buffered geometry re-projected back to `crs`."""
    tmp = gpd.GeoDataFrame(geometry=[g], crs=crs)
    return tmp.to_crs(tmp.estimate_utm_crs()).buffer(buffer).to_crs(crs).values[0]


# Buffer every point by 500 m, then join against the river-basin polygons.
gdf["geometry"] = gdf["geometry"].apply(buffer_meter, crs=gdf.crs, buffer=500)
gpd.sjoin(gdf, wri_rfr, how='inner')
data sourcing
import requests
from pathlib import Path
from zipfile import ZipFile
import urllib
import geopandas as gpd
import pandas as pd

# Download each WRI flood-risk dataset (skipping files already on disk),
# extract the archive, and load the contained shapefile.
urls = [
    "http://datasets.wri.org/dataset/c19396d9-45c8-4e92-bf05-d1411c9cc2ca/resource/498319f7-992a-4447-94b4-c62d8f1daa38/download/aqueductglobalfloodriskdatabycountry20150304.zip",
    "http://datasets.wri.org/dataset/c19396d9-45c8-4e92-bf05-d1411c9cc2ca/resource/471ef133-939c-4ca6-9b1c-5f81b5251c2b/download/aqueductglobalfloodriskdatabyriverbasin20150304.zip",
    "http://datasets.wri.org/dataset/c19396d9-45c8-4e92-bf05-d1411c9cc2ca/resource/dd90c26a-edf2-46e4-be22-4273ab2344d0/download/aqueductglobalfloodriskdatabystate20150304.zip",
]
dfs = {}
for url in urls:
    f = Path.cwd().joinpath(urllib.parse.urlparse(url).path.split("/")[-1])
    if not f.exists():
        # Stream the zip to disk, then extract into a folder named after it.
        r = requests.get(url, stream=True, headers={"User-Agent": "XY"})
        with open(f, "wb") as fd:
            for chunk in r.iter_content(chunk_size=128):
                fd.write(chunk)
        zfile = ZipFile(f)
        zfile.extractall(f.stem)
    dfs[f.stem] = gpd.read_file(list(f.parent.joinpath(f.stem).glob("*.shp"))[0])

wri_rfr = dfs["aqueductglobalfloodriskdatabyriverbasin20150304"]

Retrieving data from the Air Quality Index (AQI) website through the API and only receiving a small number of stations

I'm working on a personal project and I'm trying to retrieve air quality data from the https://aqicn.org website using their API.
I've used this code, which I've copied and adapted for the city of Bucharest as follows:
import os
import pandas as pd
import folium
import requests

# GET data from the AQI website through its API.
base_url = "https://api.waqi.info"
path_to_file = "~/path"

# Token obtained from https://aqicn.org/data-platform/token/#/
# BUG FIX: open() does not expand '~' by itself — expanduser() is required.
with open(os.path.expanduser(path_to_file)) as f:
    contents = f.readlines()
# BUG FIX: readlines() keeps the trailing newline, which would corrupt the
# request URL; strip the token.
key = contents[0].strip()

# (lat, lon) -> bottom left, (lat, lon) -> top right
latlngbox = "44.300264,25.920181,44.566991,26.297836"  # For Bucharest
trail_url = f"/map/bounds/?token={key}&latlng={latlngbox}"
my_data = pd.read_json(base_url + trail_url)  # joined parts of the URL
print('columns->', my_data.columns)  # 2 cols: 'status' and 'data' (JSON)

# Build a flat DataFrame from the nested JSON payload.
all_rows = [[each_row['station']['name'],
             each_row['lat'],
             each_row['lon'],
             each_row['aqi']]
            for each_row in my_data['data']]
df = pd.DataFrame(all_rows, columns=['station_name', 'lat', 'lon', 'aqi'])

# Clean the DataFrame: invalid AQI values become NaN, then drop them.
df['aqi'] = pd.to_numeric(df.aqi, errors='coerce')
df1 = df.dropna(subset=['aqi'])
Unfortunately it only retrieves 4 stations whereas there are many more available on the actual site. In the API documentation the only limitation I saw was for "1,000 (one thousand) requests per second" so why can't I get more of them?
Also, I've tried to modify the lat-long values and managed to get more stations, but they were outside the city I was interested in.
Here is a view of the actual perimeter I've used in the embedded code.
If you have any suggestions as of how I can solve this issue, I'd be very happy to read your thoughts. Thank you!
Try using waqi through aqicn... not exactly a clean API but I found it to work quite well
import pandas as pd
# BUG FIX: folium and HeatMap were used below but never imported.
import folium
from folium.plugins import HeatMap

url1 = 'https://api.waqi.info'
# Get token from:- https://aqicn.org/data-platform/token/#/
token = 'XXX'
box = '113.805332,22.148942,114.434299,22.561716'  # polygon around HongKong via bboxfinder.com
url2 = f'/map/bounds/?latlng={box}&token={token}'
my_data = pd.read_json(url1 + url2)

# Flatten the nested JSON payload into station rows.
all_rows = []
for each_row in my_data['data']:
    all_rows.append([each_row['station']['name'], each_row['lat'],
                     each_row['lon'], each_row['aqi']])
df = pd.DataFrame(all_rows, columns=['station_name', 'lat', 'lon', 'aqi'])

# From there it is easy to plot: coerce AQI to numeric and drop NaN rows.
df['aqi'] = pd.to_numeric(df.aqi, errors='coerce')
print('with NaN->', df.shape)
df1 = df.dropna(subset=['aqi'])
df2 = df1[['lat', 'lon', 'aqi']]

init_loc = [22.396428, 114.109497]
max_aqi = int(df1['aqi'].max())
print('max_aqi->', max_aqi)

m = folium.Map(location=init_loc, zoom_start=5)
# NOTE(review): `max_val` was removed in newer folium releases — confirm the
# installed version accepts it (older API kept here on purpose).
heat_aqi = HeatMap(df2, min_opacity=0.1, max_val=max_aqi,
                   radius=60, blur=20, max_zoom=2)
m.add_child(heat_aqi)
m
Or as such
# Alternative: one colour-coded marker per station instead of a heat map.
centre_point = [22.396428, 114.109497]
m2 = folium.Map(location=centre_point, tiles='Stamen Terrain', zoom_start=6)
for idx, row in df1.iterrows():
    lat = row['lat']
    lon = row['lon']
    station = row['station_name'] + ' AQI=' + str(row['aqi'])
    station_aqi = row['aqi']
    # Colour by AQI severity band.
    if station_aqi > 300:
        pop_color = 'red'
    elif station_aqi > 200:
        pop_color = 'orange'
    else:
        pop_color = 'green'
    folium.Marker(location=[lat, lon],
                  popup=station,
                  icon=folium.Icon(color=pop_color)).add_to(m2)
m2

# checking for stations within HK, returns 19
df[df['station_name'].str.contains('HongKong')]

In Bokeh, how can I display different information for points and patches?

I want to display different information for different layers (points and patches) using bokeh.
I downloaded the shapefile and the population information of Haitian cities respectively from here and from here and I merged them.
# Imports for the Bokeh Haiti map.
# FIX: the original imported pandas, geopandas and bokeh.plotting.figure twice;
# duplicates removed and imports grouped stdlib-free / third-party / bokeh.
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import geopandas as gpd
import osmnx as ox
import shapely

from bokeh.io import show
from bokeh.layouts import row, column
from bokeh.models import ColumnDataSource, LogColorMapper, Select
from bokeh.palettes import Spectral5
from bokeh.palettes import Viridis6 as palette
from bokeh.plotting import curdoc, figure, save
from bokeh.sampledata.autompg import autompg_clean as df
from bokeh.sampledata.us_counties import data as counties
from bokeh.sampledata.unemployment import data as unemployment

# Log scale suits the heavily skewed population counts.
color_mapper = LogColorMapper(palette=palette)
Some functions
def getPolyCoords(row, geom, coord_type):
    """Return the 'x' or 'y' coordinates of the exterior ring of the
    Polygon stored in column `geom` of `row` (None for any other type)."""
    ring = row[geom].exterior
    # exterior.coords.xy is a pair (x-sequence, y-sequence)
    if coord_type == 'x':
        return list(ring.coords.xy[0])
    if coord_type == 'y':
        return list(ring.coords.xy[1])
def getPointCoords(row, geom, coord_type):
    """Return the 'x' or 'y' coordinate of the Point stored in column
    `geom` of `row` (None for any other coord_type)."""
    point = row[geom]
    if coord_type == 'x':
        return point.x
    if coord_type == 'y':
        return point.y
Cities data
# Cities data: Haitian admin-level-2 boundaries merged with population counts.
# BUG FIX: the read_file() path was missing its opening quote (SyntaxError).
haiti = gpd.read_file('hti_admbnda_adm2_cnigs_20181129.shp')
haiti = haiti.to_crs({'init': 'epsg:32618'})
haiti = haiti[haiti.index != 98].reset_index(drop=True)  ## i=98 is corrupted
pop = pd.read_csv('hti_admnbnda_adm2_cnigs2013c.csv')
level = 2
left = 'adm%dcode' % level    # key column in the population CSV
right = 'ADM%d_PCODE' % level  # key column in the boundary shapefile
h_geom = pd.merge(pop, haiti, left_on=left, right_on=right)
Then I created a data for bokeh
# Assemble the plain-dict data source Bokeh consumes for the city patches.
grid = pd.DataFrame({
    'x': h_geom.apply(getPolyCoords, geom='geometry', coord_type='x', axis=1),
    'y': h_geom.apply(getPolyCoords, geom='geometry', coord_type='y', axis=1),
    'Name': h_geom['adm2_en'],
    'Population': h_geom['TOTAL'],
})
data = dict(
    x=list(grid['x'].values),
    y=list(grid['y'].values),
    name=list(grid['Name'].values),
    rate=list(grid['Population'].values),
)
From osmnx I get points of schools
# Schools: fetch OSM amenity points for Haiti and keep only Point geometries
# (pois_from_place also returns polygons/multipolygons).
selected_amenities = ['school']
place = 'Haiti'
schoolOSM = ox.pois_from_place(place=place, amenities=selected_amenities)
schools = gpd.GeoDataFrame(schoolOSM)

# `type(...) ==` kept deliberately: it matches exactly Point, as the original did.
idxok = [i for i in schools.index
         if type(schools['geometry'][i]) == shapely.geometry.point.Point]
schools = schools[schools.index.isin(idxok)]

schools['x'] = schools.apply(getPointCoords, geom='geometry', coord_type='x', axis=1)
schools['y'] = schools.apply(getPointCoords, geom='geometry', coord_type='y', axis=1)
data1 = dict(
    x=list(schools['x'].values),
    y=list(schools['y'].values),
)
Then I want to show the information: I would like to show Name, Population and coordinates for cities while only coordinates for schools.
# Figure: city patches coloured by population, school points in black on top.
TOOLS = "pan,wheel_zoom,reset,hover,save"
p = figure(title="Schools Point in Haiti", tools=TOOLS,
           x_axis_location=None, y_axis_location=None,
           # BUG FIX: Bokeh field tooltips use '@field', not '#field'.
           tooltips=[("Name", "@name"), ("Population", "@rate"),
                     ("(Long, Lat)", "($x, $y)")])
p.hover.point_policy = "follow_mouse"
p.patches('x', 'y', source=data,
          fill_color={'field': 'rate', 'transform': color_mapper},
          fill_alpha=1.0, line_color="black", line_width=1)
# Add points on top (as black points)
p.circle('x', 'y', size=3, source=data1, color="black")
show(p)
In doing so I get the information of Name, Population, Long, Lat for both Schools and Cities. But Schools do not have the info Name and Population, so I get something like
You need to create two separate data sources and two separate HoverTools.
from bokeh.models import HoverTool

# One data source and one dedicated HoverTool per renderer, so each layer
# shows only its own tooltips.
data_cities = dict(x=list(cities['x'].values), y=list(cities['y'].values))
data_schools = dict(x=list(schools['x'].values), y=list(schools['y'].values))
cities = p.circle('x', 'y', size=3, source=data_cities, color="green")
schools = p.circle('x', 'y', size=3, source=data_schools, color="blue")
# BUG FIX: removed the unbalanced trailing ')' on both HoverTool calls
# (SyntaxError) and switched field tooltips from '#' to Bokeh's '@' syntax.
hover_cities = HoverTool(renderers=[cities],
                         tooltips=[("Name", "@name"), ("Population", "@rate"),
                                   ("(Long, Lat)", "($x, $y)")])
hover_schools = HoverTool(renderers=[schools],
                          tooltips=[("(Long, Lat)", "($x, $y)")])
p.add_tools(hover_cities)
p.add_tools(hover_schools)

Python: bokeh not generating world's countries map as expected

I am trying to generate world's countries map colored by some values but some countries are missing from the generated map. Here are my codes:
Get the geojson data
import urllib
url = 'https://raw.githubusercontent.com/datasets/geo-boundaries-world-110m/master/countries.geojson'
testfile = urllib.URLopener()
if os.path.exists('countries.json'):
print "file already exists"
else:
testfile.retrieve(url, "countries.json")
Use bokeh to generate the map
# Question's original map script (Python 2): colour countries by user count.
# NOTE(review): indentation was lost in extraction; shown flat, as received.
import json,pandas
from collections import OrderedDict
from bokeh.plotting import figure, show, output_file, ColumnDataSource
from bokeh.models import HoverTool
user_by_country_count = pandas.read_csv('data.csv')
with open('countries.json','r') as f:
geodata = json.load(f)
f.close()
geodata_features = geodata['features']
# Parallel lists feeding the ColumnDataSource below.
country_xs = []
country_ys = []
country_names = []
country_num_users = []
country_colors = []
colors = ['#CCE5FF','#CCCCFF','#9999FF','#6666FF','#3333FF',
'#0000FF','#0000CC','#000099','#000066','#0000CC']
for aCountry in geodata_features:
# NOTE(review): taking coordinates[0] assumes a Polygon; for MultiPolygon
# features this grabs only the first polygon — the cause of the missing
# countries, fixed in the answer below.
coords = aCountry['geometry']['coordinates'][0]
# map() returns a list in Python 2; in Python 3 this would be a lazy iterator.
country_xs.append(map(lambda x:x[0],coords))
country_ys.append(map(lambda x:x[1],coords))
cName = aCountry['properties']['name']
country_names.append(cName)
if cName in user_by_country_count['Country'].values:
num_users = user_by_country_count['Count'][user_by_country_count[user_by_country_count.Country==cName].index[0]]
country_num_users.append(num_users)
# NOTE(review): `np` is used here but never imported in this snippet.
country_colors.append(colors[int(np.log(num_users))])
else:
country_num_users.append(0)
country_colors.append("#00FF80")
source = ColumnDataSource(
data = dict(
x=country_xs,
y=country_ys,
color=country_colors,
name=country_names,
users=country_num_users,
)
)
output_file("global.html", title="global.py example")
TOOLS="pan,wheel_zoom,box_zoom,reset,hover,save"
p = figure(title="Upwork Users Location", tools=TOOLS)
p.patches('x', 'y',
fill_color='color', fill_alpha=0.7,
line_color="white", line_width=0.5,
source=source)
hover = p.select(dict(type=HoverTool))
hover.point_policy = "follow_mouse"
# NOTE(review): Bokeh field tooltips are normally '@name'/'@users'; the '#'
# here looks like an extraction artifact — confirm against the original post.
hover.tooltips = OrderedDict([
("Name", "#name"),
("Number of Users", "#users"),
])
show(p)
I suspect that the problem might be in the geojson data. When I looked carefully, it looks like some coordinates are given as list of numbers while others are given as list of list of numbers. But this geojson was used before by many people so I wonder if anybody else would have encountered a similar issue.
Argentina and some other countries are of the type MultiPolygon, Brazil for example is of the type Polygon. Countries that have islands or separate lands are of the type MultiPolygon. So coordinates for MultiPolygon should contain one more level of arrays, and you should handle that:
#!/usr/bin/python2
# Answer's fixed script: handles MultiPolygon countries by appending one
# x/y ring per sub-polygon, so islands and separated lands are drawn too.
# NOTE(review): indentation was lost in extraction; shown flat, as received.
import json,pandas
from collections import OrderedDict
from bokeh.plotting import figure, show, output_file, ColumnDataSource
from bokeh.models import HoverTool
import math
user_by_country_count = pandas.read_csv('data.csv')
with open('countries.json','r') as f:
geodata = json.load(f)
geodata_features = geodata['features']
country_xs = []
country_ys = []
country_names = []
country_num_users = []
country_colors = []
colors = ['#CCE5FF','#CCCCFF','#9999FF','#6666FF','#3333FF',
'#0000FF','#0000CC','#000099','#000066','#0000CC']
for aCountry in geodata_features:
cName = aCountry['properties']['name']
country_names.append(cName)
geometry_type = aCountry['geometry']['type']
if geometry_type == "MultiPolygon":
# One exterior ring per sub-polygon of the MultiPolygon.
# NOTE(review): xs/ys get several entries here while names/users/colors
# get only one per country — the parallel lists can end up misaligned
# for MultiPolygon countries; verify against the rendered output.
for poly_coords in aCountry['geometry']['coordinates']:
coords = poly_coords[0]
country_xs.append(map(lambda x:x[0],coords))
country_ys.append(map(lambda x:x[1],coords))
else:
# Plain Polygon: a single exterior ring.
coords = aCountry['geometry']['coordinates'][0]
country_xs.append(map(lambda x:x[0],coords))
country_ys.append(map(lambda x:x[1],coords))
if cName in user_by_country_count['Country'].values:
num_users = user_by_country_count['Count'][user_by_country_count[user_by_country_count.Country==cName].index[0]]
country_num_users.append(num_users)
# math.log replaces the unimported np.log of the question's version.
country_colors.append(colors[int(math.log(num_users))])
else:
country_num_users.append(0)
country_colors.append("#00FF80")
source = ColumnDataSource(
data = dict(
x=country_xs,
y=country_ys,
color=country_colors,
name=country_names,
users=country_num_users,
)
)
output_file("global.html", title="global.py example")
TOOLS="pan,wheel_zoom,box_zoom,reset,hover,save"
p = figure(title="Upwork Users Location", tools=TOOLS)
p.patches('x', 'y',
fill_color='color', fill_alpha=0.7,
line_color="white", line_width=0.5,
source=source)
hover = p.select(dict(type=HoverTool))
hover.point_policy = "follow_mouse"
# NOTE(review): Bokeh field tooltips are normally '@name'/'@users'; the '#'
# here looks like an extraction artifact — confirm against the original post.
hover.tooltips = OrderedDict([
("Name", "#name"),
("Number of Users", "#users"),
])
show(p)

Categories