Add Markers to folium map from SQLite3 table - python

I am trying to drop many markers on a folium map. The coordinates are drawn from an SQLite3 table, but right now no map is displayed and no error is thrown.
def maps():
    melbourne = (-37.840935, 144.946457)
    map = folium.Map(location = melbourne)
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Connected to SQLite")

        sqlite_select_query = """SELECT latitude, longitude FROM test555;"""
        cursor.execute(sqlite_select_query)
        items = cursor.fetchall()

        for item in items:
            folium.Marker(location = item)

        cursor.close()
    except sqlite3.Error as error:
        print("Failed to read data from sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")
I tried making "item" a list, folium.Marker(location = [item]), but that threw the following error: ValueError: Expected two (lat, lon) values for location, instead got: [(-37.7650309, 144.9613659)].
This suggests to me that the data itself is fine, but that something else is broken somewhere.
Thanks in advance!

Each row returned by fetchall() is already a (lat, lon) tuple, so you can pass it straight to the marker without wrapping it in another list: folium.Marker(location = item)
You also need to add each marker to the map: folium.Marker(location = item).add_to(map)
In order to draw the map, you need to return it at the end of your function.
You will have something like this (it works in my Jupyter Notebook):
def maps():
    melbourne = (-37.840935, 144.946457)
    map = folium.Map(location = melbourne)
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Connected to SQLite")

        sqlite_select_query = """SELECT latitude, longitude FROM test555;"""
        cursor.execute(sqlite_select_query)
        items = cursor.fetchall()

        for item in items:
            folium.Marker(location = item).add_to(map)

        cursor.close()
    except sqlite3.Error as error:
        print("Failed to read data from sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")
    return map
N.B:
You should not use map as the name of your variable because it shadows Python's built-in map() function.
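As a usage note, outside a notebook the returned map will not display by itself; one common approach is to save it to an HTML file and open that in a browser. A minimal sketch (the variable and file names here are just examples):

fmap = maps()
fmap.save('melbourne_markers.html')   # folium writes a standalone HTML file you can open in a browser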

Related

Executing a SQL query using Python

I'm trying to create a small Python app to extract data from a specific table of a database.
The extracted rows have to be between two CREATION_DATETIME values specified by the user.
Here's the code:
startdate = input("Prosze podac poczatek przedzialu czasowego (format RRRR-MM-DD GG:MM:SS): ")
enddate = input("Prosze podac koniec przedzialu czasowego (format RRRR-MM-DD GG:MM:SS): ")
query = "SELECT * FROM BRDB.RFX_IKW_MODIFY_EXEC_ORDER_CANCEL_LOG WHERE CREATION_DATETIME between '%s' and '%s' ORDER BY CREATION_DATETIME DESC;"
tuple1 = (startdate, enddate)
cursor.execute(*query, (tuple1,))
records = cursor.fetchall()
print("Total number of rows in table: ", cursor.rowcount)
print(records)
I'm not much of a developer and I'm stuck at the error "TypeError: CMySQLCursorPrepared.execute() takes from 2 to 4 positional arguments but 104 were given" (the count varies depending on how I try to modify the code).
Could you help me specify that query correctly?
Thank you in advance.
I've tried various tutorials about parameterized queries, but with no luck.
You're unpacking the query with *, which passes each character of the string as a separate positional argument (hence the "104 were given"); that isn't what you meant, so remove the * operator. In addition, tuple1 is already a tuple, so you shouldn't enclose it inside another tuple:
cursor.execute(query, tuple1)
# no * before query, and tuple1 is passed directly
Here is the full code:
import mysql.connector
from mysql.connector import Error

try:
    print("Laczenie z baza danych....")
    connection = mysql.connector.connect(host='',
                                         port='',
                                         database='',
                                         user='',
                                         password='')
    if connection.is_connected():
        db_Info = connection.get_server_info()
        print("Wersja servera MySQL:", db_Info)
        cursor = connection.cursor(prepared=True)
        cursor.execute("select database();")
        record = cursor.fetchone()
        print("Pomyslnie polaczono z baza danych: ", record)
except Error as e:
    print("Blad polaczenia!", e)
    quit()

try:
    startdate = input("Prosze podac poczatek przedzialu czasowego (format RRRR-MM-DD GG:MM:SS): ")
    enddate = input("Prosze podac koniec przedzialu czasowego (format RRRR-MM-DD GG:MM:SS): ")
    query = "SELECT * FROM BRDB.RFX_IKW_MODIFY_EXEC_ORDER_CANCEL_LOG WHERE CREATION_DATETIME between '%s' and '%s' ORDER BY CREATION_DATETIME DESC;"
    tuple1 = (startdate, enddate,)
    cursor.execute(query, tuple1)
    records = cursor.fetchall()
    print("Fetching each row using column name")
    for row in records:
        message_id = row["MESSAGE_ID"]
        executable_order_id = row["EXECUTABLE_ORDER_ID"]
        creation_datetime = row["CREATION_DATETIME"]
        message_type = row["MESSAGE_TYPE"]
        message_status = row["MESSAGE_STATUS"]
        print(message_id, executable_order_id, creation_datetime, message_status)
except mysql.connector.Error as e:
    print("Error reading data from MySQL table", e)
finally:
    if connection.is_connected():
        cursor.close()
        connection.close()
        print("MySQL connection is closed")

Incorporating my existing code as a Django Model?

I have built a program that uses a web scraper and geopy to extract venue names with latitude/longitude coordinates, with the aim of putting the venues onto a map on a website.
I am currently using Django and need to begin creating the database for the website. Is it possible to incorporate my pre-existing program code as a Django model and have the website draw from the database I have already written the code to create?
Here is the code that web-scrapes, geocodes, and writes to the database. Can it be integrated into my Django code as a model, and what would I need to change to make it work well?
#cafeNames
def scrapecafes(city, area):
    #url = 'https://www.broadsheet.com.au/melbourne/guides/best-cafes-thornbury' #go to the website
    url = f"https://www.broadsheet.com.au/{city}/guides/best-cafes-{area}"
    response = requests.get(url, timeout=5)
    soup_cafe_names = BeautifulSoup(response.content, "html.parser")
    type(soup_cafe_names)
    cafeNames = soup_cafe_names.findAll('h2', attrs={"class":"venue-title", }) #scrape the elements
    cafeNamesClean = [cafe.text.strip() for cafe in cafeNames] #clean the elements
    #cafeNameTuple = [(cafe,) for cafe in cafeNamesCleans
    #print(cafeNamesClean)

    #addresses
    soup_cafe_addresses = BeautifulSoup(response.content, "html.parser")
    type(soup_cafe_addresses)
    cafeAddresses = soup_cafe_addresses.findAll( attrs={"class":"address-content" })
    cafeAddressesClean = [address.text for address in cafeAddresses]
    #cafeAddressesTuple = [(address,) for address in cafeAddressesClean]
    #print(cafeAddressesClean)

    ##geocode addresses
    locator = Nominatim(user_agent="myGeocoder")
    geocode = RateLimiter(locator.geocode, min_delay_seconds=1)

    lat = []
    long = []

    try:
        for address in cafeAddressesClean:
            location = locator.geocode(address.strip().replace(',',''))
            lat.append(location.latitude)
            long.append(location.longitude)
    except:
        lat.append(None)
        long.append(None)

    #zip up for table
    fortable = list(zip(cafeNamesClean, cafeAddressesClean, lat, long))
    print(fortable)

    ##connect to database
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Database created and Successfully Connected to 25july_database")

        sqlite_select_Query = "select sqlite_version();"
        cursor.execute(sqlite_select_Query)
        record = cursor.fetchall()
        print("SQLite Database Version is: ", record)
        cursor.close()
    except sqlite3.Error as error:
        print("Error while connecting to sqlite", error)

    #create table
    try:
        sqlite_create_table_query = ''' CREATE TABLE IF NOT EXISTS test666 (
                                        name TEXT NOT NULL,
                                        address TEXT NOT NULL,
                                        latitude FLOAT,
                                        longitude FLOAT
                                        );'''
        cursor = sqliteConnection.cursor()
        print("Successfully Connected to SQLite")
        cursor.execute(sqlite_create_table_query)
        sqliteConnection.commit()
        print("SQLite table created")
    except sqlite3.Error as error:
        print("Error while creating a sqlite table", error)

    ##enter data into table
    try:
        sqlite_insert_name_param = """INSERT INTO test666
                                      (name, address, latitude, longitude)
                                      VALUES (?,?,?,?);"""
        cursor.executemany(sqlite_insert_name_param, fortable)
        sqliteConnection.commit()
        print("Total", cursor.rowcount, "Records inserted successfully into table")
        sqliteConnection.commit()
        cursor.close()
    except sqlite3.Error as error:
        print("Failed to insert data into sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")

UnboundLocalError does not occur consistently

I am trying to add data to my SQLite3 table, which happens in a function that takes two arguments to find a city and a neighbourhood: def scrapecafes(city, area). Strangely, this works well with some of the arguments I enter but not with others. For example, if I run scrapecafes(melbourne, thornbury) the code works fine, but if I run scrapecafes(melbourne, carlton) I get the following error: UnboundLocalError: local variable 'lat' referenced before assignment
I know the function definitely works, but I can't figure out why I am getting the UnboundLocalError for some arguments but not for others. Here is the code:
import folium
from bs4 import BeautifulSoup
import requests
from requests import get
import sqlite3
import geopandas
import geopy
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter
#cafeNames
def scrapecafes(city, area):
    #url = 'https://www.broadsheet.com.au/melbourne/guides/best-cafes-thornbury' #go to the website
    url = f"https://www.broadsheet.com.au/{city}/guides/best-cafes-{area}"
    response = requests.get(url, timeout=5)
    soup_cafe_names = BeautifulSoup(response.content, "html.parser")
    type(soup_cafe_names)
    cafeNames = soup_cafe_names.findAll('h2', attrs={"class":"venue-title", }) #scrape the elements
    cafeNamesClean = [cafe.text.strip() for cafe in cafeNames] #clean the elements
    #cafeNameTuple = [(cafe,) for cafe in cafeNamesCleans
    #print(cafeNamesClean)

    #addresses
    soup_cafe_addresses = BeautifulSoup(response.content, "html.parser")
    type(soup_cafe_addresses)
    cafeAddresses = soup_cafe_addresses.findAll( attrs={"class":"address-content" })
    cafeAddressesClean = [address.text for address in cafeAddresses]
    #cafeAddressesTuple = [(address,) for address in cafeAddressesClean]
    #print(cafeAddressesClean)

    ##geocode addresses
    locator = Nominatim(user_agent="myGeocoder")
    geocode = RateLimiter(locator.geocode, min_delay_seconds=1)

    try:
        location = []
        for item in cafeAddressesClean:
            location.append(locator.geocode(item))
        lat = [loc.latitude for loc in location]
        long = [loc.longitude for loc in location]
    except:
        pass

    #zip up for table
    fortable = list(zip(cafeNamesClean, cafeAddressesClean, lat, long))
    print(fortable)

    ##connect to database
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Database created and Successfully Connected to 25july_database")

        sqlite_select_Query = "select sqlite_version();"
        cursor.execute(sqlite_select_Query)
        record = cursor.fetchall()
        print("SQLite Database Version is: ", record)
        cursor.close()
    except sqlite3.Error as error:
        print("Error while connecting to sqlite", error)

    #create table
    try:
        sqlite_create_table_query = ''' CREATE TABLE IF NOT EXISTS test555 (
                                        name TEXT NOT NULL,
                                        address TEXT NOT NULL,
                                        latitude FLOAT NOT NULL,
                                        longitude FLOAT NOT NULL
                                        );'''
        cursor = sqliteConnection.cursor()
        print("Successfully Connected to SQLite")
        cursor.execute(sqlite_create_table_query)
        sqliteConnection.commit()
        print("SQLite table created")
    except sqlite3.Error as error:
        print("Error while creating a sqlite table", error)

    ##enter data into table
    try:
        sqlite_insert_name_param = """INSERT INTO test555
                                      (name, address, latitude, longitude)
                                      VALUES (?,?,?,?);"""
        cursor.executemany(sqlite_insert_name_param, fortable)
        sqliteConnection.commit()
        print("Total", cursor.rowcount, "Records inserted successfully into table")
        sqliteConnection.commit()
        cursor.close()
    except sqlite3.Error as error:
        print("Failed to insert data into sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")
The problem is that geopy doesn't have coordinates for Carlton, so you should change your table schema to allow NULL and insert null in those cases.
When geopy doesn't have data it returns None, and calling an attribute on None throws an exception; because your except: pass swallows that exception before lat and long are ever assigned, you get the UnboundLocalError. You have to put the try/except block inside the for loop.
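In isolation, the corrected geocoding loop looks like this (a sketch of just that part, reusing the question's variable names; AttributeError is what a None result triggers here):

lat = []
long = []
for item in cafeAddressesClean:
    try:
        location = locator.geocode(item.strip().replace(',', ''))
        lat.append(location.latitude)
        long.append(location.longitude)
    except AttributeError:
        # geocode() returned None for this address, so record missing coordinates
        lat.append(None)
        long.append(None)

The full version below applies this change and drops the NOT NULL constraints on the latitude/longitude columns: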
from bs4 import BeautifulSoup
import requests
from requests import get
import sqlite3
import geopandas
import geopy
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter
#cafeNames
def scrapecafes(city, area):
    #url = 'https://www.broadsheet.com.au/melbourne/guides/best-cafes-thornbury' #go to the website
    url = f"https://www.broadsheet.com.au/{city}/guides/best-cafes-{area}"
    response = requests.get(url, timeout=5)
    soup_cafe_names = BeautifulSoup(response.content, "html.parser")
    cafeNames = soup_cafe_names.findAll('h2', attrs={"class":"venue-title", }) #scrape the elements
    cafeNamesClean = [cafe.text.strip() for cafe in cafeNames] #clean the elements
    #cafeNameTuple = [(cafe,) for cafe in cafeNamesCleans

    #addresses
    soup_cafe_addresses = BeautifulSoup(response.content, "html.parser")
    cafeAddresses = soup_cafe_addresses.findAll( attrs={"class":"address-content" })
    cafeAddressesClean = [address.text for address in cafeAddresses]
    #cafeAddressesTuple = [(address,) for address in cafeAddressesClean]

    ##geocode addresses
    locator = Nominatim(user_agent="myGeocoder")
    geocode = RateLimiter(locator.geocode, min_delay_seconds=1)

    lat = []
    long = []
    for item in cafeAddressesClean:
        try:
            location = locator.geocode(item.strip().replace(',',''))
            lat.append(location.latitude)
            long.append(location.longitude)
        except:
            lat.append(None)
            long.append(None)

    #zip up for table
    fortable = list(zip(cafeNamesClean, cafeAddressesClean, lat, long))
    print(fortable)

    ##connect to database
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Database created and Successfully Connected to 25july_database")

        sqlite_select_Query = "select sqlite_version();"
        cursor.execute(sqlite_select_Query)
        record = cursor.fetchall()
        print("SQLite Database Version is: ", record)
        cursor.close()
    except sqlite3.Error as error:
        print("Error while connecting to sqlite", error)

    #create table
    try:
        sqlite_create_table_query = ''' CREATE TABLE IF NOT EXISTS test (
                                        name TEXT NOT NULL,
                                        address TEXT NOT NULL,
                                        latitude FLOAT,
                                        longitude FLOAT
                                        );'''
        cursor = sqliteConnection.cursor()
        print("Successfully Connected to SQLite")
        cursor.execute(sqlite_create_table_query)
        sqliteConnection.commit()
        print("SQLite table created")
    except sqlite3.Error as error:
        print("Error while creating a sqlite table", error)

    ##enter data into table
    try:
        sqlite_insert_name_param = """INSERT INTO test
                                      (name, address, latitude, longitude)
                                      VALUES (?,?,?,?);"""
        cursor.executemany(sqlite_insert_name_param, fortable)
        sqliteConnection.commit()
        print("Total", cursor.rowcount, "Records inserted successfully into table")
        sqliteConnection.commit()
        cursor.close()
    except sqlite3.Error as error:
        print("Failed to insert data into sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")

scrapecafes('melbourne', 'carlton')

Python Pyodbc Binary Column is returning \xda\x08\xcd\x08\xba\x08 instead of numbers

I have a SQL database with a varbinary(max) column that displays values like 0x9406920691068F... I want to read it into Python (PyCharm) and get exactly the same kind of value.
However, it shows something like this instead:
[b'\x94\x06\x92\x06\x91\x06\x8f\x06\x8d..
How do I get the same numbers in Python? I am a beginner in Python, please help.
I copied the code from a previous post and it didn't work:
import pyodbc

def hexToString(binaryString):
    try:
        hashString = ["{0:0>2}".format(hex(b)[2:].upper()) for b in binaryString]
        return '0x' + "".join(hashString)
    except:
        return binaryString

query = """ select P from Access.table """

conn_str = (
    **** private database details # I don't copy on the page
)

cnxn = pyodbc.connect(conn_str)
cnxn.add_output_converter(pyodbc.SQL_BINARY, hexToString)
cursor = cnxn.cursor()

try:
    cursor.execute(query)
    row = cursor.fetchone()
except MySQLdb.error as err:
    print(err)
else:
    while row is not None:
        print(row)
        row = cursor.fetchone()
If the column return type is varbinary(max) then you need to add the output converter function to handle SQL_VARBINARY, not SQL_BINARY:
cnxn.add_output_converter(pyodbc.SQL_VARBINARY, converter_function_name)
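Putting that together with the converter function from the question, a minimal sketch might look like this; the connection string is a placeholder and the query is borrowed from the question:

import pyodbc

def hexToString(binaryString):
    # format each byte as two upper-case hex digits and prepend '0x'
    try:
        hashString = ["{0:0>2}".format(hex(b)[2:].upper()) for b in binaryString]
        return '0x' + "".join(hashString)
    except:
        return binaryString

cnxn = pyodbc.connect("DSN=mydsn;UID=user;PWD=secret")        # placeholder connection string
cnxn.add_output_converter(pyodbc.SQL_VARBINARY, hexToString)  # varbinary(max) arrives as SQL_VARBINARY

cursor = cnxn.cursor()
cursor.execute("select P from Access.table")
for row in cursor.fetchall():
    print(row)   # the varbinary column now prints as 0x9406...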

Python cx_Oracle SQL with bind string variable

I have a problem with creating an SQL query for an Oracle database using Python.
I want to bind a string variable and it does not work. Could you tell me what I am doing wrong?
This is my code:
import cx_Oracle

dokList = []

def LoadDatabase():
    conn = None
    cursor = None
    try:
        conn = cx_Oracle.connect("login", "password", "localhost")
        cursor = conn.cursor()
        query = "SELECT * FROM DOCUMENT WHERE DOC = :param"
        for doknumber in dokList:
            cursor.execute(query, {'doknr':doknumber})
            print(cursor.rowcount)
    except cx_Oracle.DatabaseError as err:
        print(err)
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()

def CheckData():
    with open('changedNamed.txt') as f:
        lines = f.readlines()
        for line in lines:
            dokList.append(line)

CheckData()
LoadDatabase()
The output of cursor.rowcount is 0, but it should be a number greater than 0.
You're using a dictionary ({'doknr': doknumber}) for your parameters, so it's a named bind - the :param in the query needs to match the key name. Try this:
query = "SELECT * FROM DOCUMENT WHERE DOC = :doknr"
for doknumber in dokList:
cursor.execute(query, {'doknr':doknumber})
print(cursor.rowcount)
For future troubleshooting, to check whether your parameter is getting passed properly, you can also try changing your query to "select :param from dual".
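For example, a quick sketch of that check, assuming conn is an already-open cx_Oracle connection:

cursor = conn.cursor()
cursor.execute("select :param from dual", {'param': 'hello'})
print(cursor.fetchone())   # ('hello',) shows the value was bound and returned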
