I'm getting an IndexError: list index out of range error. I'm new to Python, a complete beginner, and would appreciate some help understanding what's wrong.
I'm getting JSON from a timesheet API, which I need to save to a MySQL database:
import requests
import urllib2
from urllib2 import urlopen
import json
import mysql.connector

site = 'https://api.site.com/Projects/?version=5'
hdr = {'Authorization': 'WRAP access_token="TOKEN"', 'Accept': 'application/json'}

req = urllib2.Request(site, headers=hdr)
try:
    page = urllib2.urlopen(req)
except urllib2.HTTPError, e:
    print e.fp.read()

response = urllib2.urlopen(req).read()
json_obj = json.loads(response.decode('utf8'))

conn = mysql.connector.connect(host="HOST_IP",
                               user="USER",
                               passwd="PASS",
                               db="DB",
                               port=3306,
                               collation="utf8mb4_unicode_ci")
cursor = conn.cursor()

for Project in json_obj["Projects"]:
    cursor.execute("""
        INSERT INTO project
        (ID, Client_Name, Name, BusSector, ProjCat, SageCode)
        VALUES
        (%s, %s, %s, %s, %s, %s)
        ON DUPLICATE KEY UPDATE
        Client_Name = VALUES(Client_Name),
        Name = VALUES(Name),
        BusSector = VALUES(BusSector),
        ProjCat = VALUES(ProjCat),
        SageCode = VALUES(SageCode);
        """, (Project["ID"], Project["Client"]["Name"], Project["Name"],
              Project["CustomFields"][0]["Values"][0],
              Project["CustomFields"][2]["Values"][0],
              Project["CustomFields"][1]["Values"][0]))

conn.commit()
cursor.close()
conn.close()
The traceback output is:
line 52, in <module>
    """,(Project["ID"],Project["Client"]["Name"],Project["Name"],Project["CustomFields"][0]["Values"][0],Project["CustomFields"][2]["Values"][0],Project["CustomFields"][1]["Values"][0])
IndexError: list index out of range
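It looks like some projects come back with fewer CustomFields entries, or with an empty Values list, so indexing [0]/[2] raises the IndexError. A minimal sketch of a defensive lookup, assuming the field positions used above (the helper name is hypothetical):

def custom_field(project, index, default=None):
    # Hypothetical helper: return CustomFields[index]["Values"][0] when present,
    # otherwise a default, so short or empty lists don't raise IndexError.
    fields = project.get("CustomFields", [])
    if index < len(fields) and fields[index].get("Values"):
        return fields[index]["Values"][0]
    return default

# inside the loop, e.g.:
# bus_sector = custom_field(Project, 0)
# sage_code = custom_field(Project, 1)
# proj_cat = custom_field(Project, 2)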
Related
I am trying to add data to my SQLite3 table via a function that takes two arguments to find a city and a neighbourhood: def scrapecafes(city, area). Strangely, this works well with some of the arguments I am entering but not with others. For example, if I run scrapecafes('melbourne', 'thornbury') the code works fine, but if I run scrapecafes('melbourne', 'carlton') I get the following error: UnboundLocalError: local variable 'lat' referenced before assignment
I know the function definitely works, but I can't figure out why I am getting the UnboundLocalError for some arguments but not for others. Here is the code:
import folium
from bs4 import BeautifulSoup
import requests
from requests import get
import sqlite3
import geopandas
import geopy
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter
#cafeNames
def scrapecafes(city, area):
    #url = 'https://www.broadsheet.com.au/melbourne/guides/best-cafes-thornbury' #go to the website
    url = f"https://www.broadsheet.com.au/{city}/guides/best-cafes-{area}"
    response = requests.get(url, timeout=5)
    soup_cafe_names = BeautifulSoup(response.content, "html.parser")
    type(soup_cafe_names)
    cafeNames = soup_cafe_names.findAll('h2', attrs={"class":"venue-title", }) #scrape the elements
    cafeNamesClean = [cafe.text.strip() for cafe in cafeNames] #clean the elements
    #cafeNameTuple = [(cafe,) for cafe in cafeNamesClean]
    #print(cafeNamesClean)

    #addresses
    soup_cafe_addresses = BeautifulSoup(response.content, "html.parser")
    type(soup_cafe_addresses)
    cafeAddresses = soup_cafe_addresses.findAll( attrs={"class":"address-content" })
    cafeAddressesClean = [address.text for address in cafeAddresses]
    #cafeAddressesTuple = [(address,) for address in cafeAddressesClean]
    #print(cafeAddressesClean)

    ##geocode addresses
    locator = Nominatim(user_agent="myGeocoder")
    geocode = RateLimiter(locator.geocode, min_delay_seconds=1)
    try:
        location = []
        for item in cafeAddressesClean:
            location.append(locator.geocode(item))
        lat = [loc.latitude for loc in location]
        long = [loc.longitude for loc in location]
    except:
        pass

    #zip up for table
    fortable = list(zip(cafeNamesClean, cafeAddressesClean, lat, long))
    print(fortable)

    ##connect to database
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Database created and Successfully Connected to 25july_database")
        sqlite_select_Query = "select sqlite_version();"
        cursor.execute(sqlite_select_Query)
        record = cursor.fetchall()
        print("SQLite Database Version is: ", record)
        cursor.close()
    except sqlite3.Error as error:
        print("Error while connecting to sqlite", error)

    #create table
    try:
        sqlite_create_table_query = ''' CREATE TABLE IF NOT EXISTS test555 (
                name TEXT NOT NULL,
                address TEXT NOT NULL,
                latitude FLOAT NOT NULL,
                longitude FLOAT NOT NULL
        );'''
        cursor = sqliteConnection.cursor()
        print("Successfully Connected to SQLite")
        cursor.execute(sqlite_create_table_query)
        sqliteConnection.commit()
        print("SQLite table created")
    except sqlite3.Error as error:
        print("Error while creating a sqlite table", error)

    ##enter data into table
    try:
        sqlite_insert_name_param = """INSERT INTO test555
                          (name, address, latitude, longitude)
                          VALUES (?,?,?,?);"""
        cursor.executemany(sqlite_insert_name_param, fortable)
        sqliteConnection.commit()
        print("Total", cursor.rowcount, "Records inserted successfully into table")
        sqliteConnection.commit()
        cursor.close()
    except sqlite3.Error as error:
        print("Failed to insert data into sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")
The problem is that geopy doesn't have coordinates for Carlton. When geopy has no data for an address it returns None, and calling .latitude on None throws an exception; your bare except then swallows it before lat is ever assigned, which is exactly the UnboundLocalError you see. Hence you should change your table schema to allow NULL in those cases and move the try/except block inside the for loop:
from bs4 import BeautifulSoup
import requests
from requests import get
import sqlite3
import geopandas
import geopy
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter
#cafeNames
def scrapecafes(city, area):
    #url = 'https://www.broadsheet.com.au/melbourne/guides/best-cafes-thornbury' #go to the website
    url = f"https://www.broadsheet.com.au/{city}/guides/best-cafes-{area}"
    response = requests.get(url, timeout=5)
    soup_cafe_names = BeautifulSoup(response.content, "html.parser")
    cafeNames = soup_cafe_names.findAll('h2', attrs={"class":"venue-title", }) #scrape the elements
    cafeNamesClean = [cafe.text.strip() for cafe in cafeNames] #clean the elements
    #cafeNameTuple = [(cafe,) for cafe in cafeNamesClean]

    #addresses
    soup_cafe_addresses = BeautifulSoup(response.content, "html.parser")
    cafeAddresses = soup_cafe_addresses.findAll( attrs={"class":"address-content" })
    cafeAddressesClean = [address.text for address in cafeAddresses]
    #cafeAddressesTuple = [(address,) for address in cafeAddressesClean]

    ##geocode addresses
    locator = Nominatim(user_agent="myGeocoder")
    geocode = RateLimiter(locator.geocode, min_delay_seconds=1)
    lat = []
    long = []
    for item in cafeAddressesClean:
        try:
            location = locator.geocode(item.strip().replace(',',''))
            lat.append(location.latitude)
            long.append(location.longitude)
        except:
            lat.append(None)
            long.append(None)

    #zip up for table
    fortable = list(zip(cafeNamesClean, cafeAddressesClean, lat, long))
    print(fortable)

    ##connect to database
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Database created and Successfully Connected to 25july_database")
        sqlite_select_Query = "select sqlite_version();"
        cursor.execute(sqlite_select_Query)
        record = cursor.fetchall()
        print("SQLite Database Version is: ", record)
        cursor.close()
    except sqlite3.Error as error:
        print("Error while connecting to sqlite", error)

    #create table
    try:
        sqlite_create_table_query = ''' CREATE TABLE IF NOT EXISTS test (
                name TEXT NOT NULL,
                address TEXT NOT NULL,
                latitude FLOAT,
                longitude FLOAT
        );'''
        cursor = sqliteConnection.cursor()
        print("Successfully Connected to SQLite")
        cursor.execute(sqlite_create_table_query)
        sqliteConnection.commit()
        print("SQLite table created")
    except sqlite3.Error as error:
        print("Error while creating a sqlite table", error)

    ##enter data into table
    try:
        sqlite_insert_name_param = """INSERT INTO test
                          (name, address, latitude, longitude)
                          VALUES (?,?,?,?);"""
        cursor.executemany(sqlite_insert_name_param, fortable)
        sqliteConnection.commit()
        print("Total", cursor.rowcount, "Records inserted successfully into table")
        sqliteConnection.commit()
        cursor.close()
    except sqlite3.Error as error:
        print("Failed to insert data into sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")

scrapecafes('melbourne', 'carlton')
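A quick way to confirm the now-nullable columns behave as intended is to query back the rows geopy could not geocode; a small sketch, assuming the test table created above:

import sqlite3

conn = sqlite3.connect('25july_database.db')
for row in conn.execute("SELECT name, address FROM test WHERE latitude IS NULL"):
    print(row)  # cafes whose addresses geopy couldn't resolve
conn.close()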
I cannot get this script that I created to work. It needs to collect API data (which returns JSON) and I want to save specific fields to MySQL. I played around with the code and didn't get it to work; I get various "expected an indented block" errors:
from __future__ import print_function
import requests
import re
import MySQLdb
import json
data = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')
HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"
def store_data(articles, source, auther, title, description, url, timestamp, content):
    db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO table (articles, source, auther, title, description, url, timestamp, content) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
    cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
    db.commit()
    cursor.close()
    db.close()
    return

# db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")# cursor = db.cursor()

def on_data(self, data): #This is the meat of the script...it connects to your mongoDB and stores the tweet
    try:
        datajson = json.loads(data) # grab the wanted data from the Tweet
        articles = datajson['articles']
        source = datajson['articles']['source']['name']
        auther = datajson['articles']['auther']
        title = datajson['articles']['title']
        description = datajson['articles']['description']
        url = datajson['articles']['url']
        timestamp = parser.parse(datajson['articles']['publishedAt'])
        content = datajson['articles']['content']
        # insert the data into the MySQL database
        store_data(articles, source, auther, title, description, url, timestamp, content)
    except Exception as e:
        print(e)
I expect the output to be stored in a MySQL table, but I get an error when trying to run the script. I also need to make it run endlessly until I kill the process/session:
from __future__ import print_function
import requests
import MySQLdb
from dateutil import parser
HOST = "localhost"
USER = "root"
PASSWD = "ssss!"
DATABASE = "sss"
def store_data(articles):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO usa_news (articles) VALUES (%s)")
    cursor.execute(insert_query, (articles,))
    db.commit()
    cursor.close()
    db.close()
    return

# api-endpoint
URL = "https://newsapi.org/v2/sources?apiKey=ssssssssss"
# API given here
country = "us"
# defining a params dict for the parameters to be sent to the API
PARAMS = {'country':country}
# sending get request and saving the response as response object
r = requests.get(url = URL, params = PARAMS)
# extracting data in json format
data = r.json()
# extracting the id of the first matching source
articles = data['sources'][0]['id']
# printing the output
print("article name:%s" % (articles))
#insert the data into the MySQL database
store_data(articles)
Finally made it work!
Your indents are all messed up, and Python relies on indentation. I didn't look at the code itself, so it might still be bugged, but here it is with the indents fixed:
from __future__ import print_function
import requests
import re
import MySQLdb
import json
from dateutil import parser  # needed for parser.parse() below

HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"

def store_data(articles, source, auther, title, description, url, timestamp, content):
    db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")
    cursor = db.cursor()
    # 'table' is a reserved word in MySQL, so it has to be backquoted
    insert_query = MySQLdb.escape_string("INSERT INTO `table` (articles, source, auther, title, description, url, timestamp, content) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
    cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
    db.commit()
    cursor.close()
    db.close()
    return

# db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")# cursor = db.cursor()

def on_data(data): #This is the meat of the script... it parses the API response and stores the articles
    try:
        datajson = json.loads(data) # grab the wanted data from the response
        articles = datajson['articles']
        source = datajson['articles']['source']['name']
        auther = datajson['articles']['auther']
        title = datajson['articles']['title']
        description = datajson['articles']['description']
        url = datajson['articles']['url']
        timestamp = parser.parse(datajson['articles']['publishedAt'])
        content = datajson['articles']['content']
        # insert the data into the MySQL database
        store_data(articles, source, auther, title, description, url, timestamp, content)
    except Exception as e:
        print(e)

if __name__ == '__main__':
    data = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')
    on_data(data.text)  # json.loads() expects the response body as a string, not the Response object
Updated to reflect changes suggested in comments
import requests
import MySQLdb
from dateutil import parser

HOST = "localhost"
USER = "root"
PASSWD = "xxxxx"
DATABASE = "xxxxx"

# api-endpoint
URL = "https://newsapi.org/v2/sources?apiKey=xxxxxxxxxxxxxxxxxxx"
# API given here
country = "us"
# defining a params dict for the parameters to be sent to the API
PARAMS = {'country':country}
# sending get request and saving the response as response object
r = requests.get(url = URL, params = PARAMS)
# extracting data in json format
data = r.json()
# extracting the id of the first matching source
articles = data['sources'][0]['id']
# printing the output
print("article name:%s" % (articles))

def store_data(articles):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO xxxxx (articles) VALUES (%s)")
    cursor.execute(insert_query, (articles,))  # note the comma: execute expects a sequence of parameters
    db.commit()
    cursor.close()
    db.close()
    return

#insert the data into the MySQL database
store_data(articles)
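The question also asked for the script to run endlessly until the process is killed. A minimal sketch, assuming a fixed polling interval is acceptable (the 60-second value is an arbitrary choice):

import time

while True:
    r = requests.get(url=URL, params=PARAMS)
    data = r.json()
    articles = data['sources'][0]['id']
    store_data(articles)
    time.sleep(60)  # repeat until the process/session is killed (e.g. Ctrl+C)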
I have a GCF (Google Cloud Function) in Python that refreshes an access token every 9 hours. When I test the function I get a 200 response, success! But when I send POST requests, or try to call the function via Google Cloud Scheduler, I get Error: could not handle the request on the first 3 or more attempts. After sending a couple of requests the function executes. Why? My function is below:
from os import getenv
import requests
import pymysql
import json
from pymysql.err import OperationalError

CONNECTION_NAME = getenv('INSTANCE_CONNECTION_NAME', '')
DB_USER = getenv('MYSQL_USER', '')
DB_PASSWORD = getenv('MYSQL_PASSWORD', '')
DB_NAME = getenv('MYSQL_DATABASE', '')

mysql_config = {
    'user': DB_USER,
    'password': DB_PASSWORD,
    'db': DB_NAME,
    'charset': 'utf8mb4',
    'cursorclass': pymysql.cursors.DictCursor,
    'autocommit': True
}

mysql_conn = None

def __get_cursor():
    try:
        return mysql_conn.cursor()
    except OperationalError:
        mysql_conn.ping(reconnect=True)
        return mysql_conn.cursor()

def authMonzo(request):
    global mysql_conn
    global endpoint
    endpoint = ''
    if not mysql_conn:
        try:
            mysql_conn = pymysql.connect(**mysql_config)
        except OperationalError:
            mysql_config['unix_socket'] = f'/cloudsql/{CONNECTION_NAME}'
            mysql_conn = pymysql.connect(**mysql_config)
    with __get_cursor() as cursor:
        cursor.execute("SELECT * FROM TOKENS ORDER BY ID DESC LIMIT 1")
        result = cursor.fetchall()
        clientSecret = result[...]
        clientId = result[...]
        refreshToken = result[...]
        # Exchange the last refresh token for a new access token
        tokenRequest = requests.post(endpoint, data={'grant_type': 'refresh_token', 'client_id': clientId, 'client_secret': clientSecret, 'refresh_token': refreshToken})
        tokenJson = json.loads(tokenRequest.content)
        print(tokenJson)
        # Assign the values to variables
        accessToken = tokenJson['access_token']
        refreshToken = tokenJson['refresh_token']
        scope = tokenJson['scope']
        clientId = tokenJson['client_id']
        expiresIn = tokenJson['expires_in']
        userId = tokenJson['user_id']
        tokenType = tokenJson['token_type']
        # Insert the new token on the table tokens
        SQLToken = ("INSERT INTO TOKENS (CLIENT_SECRET, ACCESS_TOKEN, REFRESH_TOKEN, TOKEN_TYPE, USER_ID, CLIENT_ID, EXPIRES_IN, SCOPE) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
        Values = (clientSecret, accessToken, refreshToken, tokenType, userId, clientId, expiresIn, scope)
        cursor.execute(SQLToken, Values)
    mysql_conn.close()
    return str(accessToken)
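A guess rather than a diagnosis: mysql_conn.close() at the end of each call leaves the module-level mysql_conn set but unusable, so a warm instance re-enters with a dead connection, and any exception raised before the handler returns surfaces as Error: could not handle the request. One way to see the real cause is to log exceptions explicitly; a minimal sketch (the wrapper name is hypothetical):

import logging
import traceback

def auth_monzo_safe(request):
    # Hypothetical wrapper around authMonzo: log the underlying exception so
    # the cause of "could not handle the request" shows up in Cloud Logging.
    try:
        return authMonzo(request)
    except Exception:
        logging.error("authMonzo failed:\n%s", traceback.format_exc())
        raise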
This is the first time I'm creating an API for Android Retrofit. I modified this code according to a snippet I found online. The main functionality of the POST method is to take the given parameters and store them in the SQLite3 database.
The schema of the two tables involved:
sqlite> .schema spending
CREATE TABLE spending(
    ID INTEGER PRIMARY KEY AUTOINCREMENT,
    date TEXT,
    reason TEXT,
    amount INTEGER
);
CREATE TABLE receiving(
    ID INTEGER PRIMARY KEY AUTOINCREMENT,
    date TEXT,
    from_reason TEXT,
    amount INTEGER
);
from flask import Flask, request
from flask_restful import Resource, Api
from sqlalchemy import create_engine
from flask import jsonify
db_connect = create_engine('sqlite:///api.db')
app = Flask(__name__)
api = Api(app)
class AddSpending(Resource):
    def add_spending(self):
        try:
            _json = request.json
            _date = _json['date']
            _reason = _json['reason']
            _amount = _json['amount']
            # validate the received values
            if _date and _reason and _amount and request.method == 'POST':
                #do not save password as a plain text
                #_hashed_password = generate_password_hash(_password)
                # save edits
                sql = "INSERT INTO spending(date, reason, amount) VALUES(%s, %s, %d)"
                data = (_date, _reason, _amount)
                #conn = mysql.connect()
                conn = db_connect.connect()
                cursor = db_connect.cursor()
                conn.cursor()
                conn.execute(sql, data)
                conn.commit()
                #resp = jsonify('Spending added successfully!')
                #resp.status_code = 200
                return
            else:
                return 404
        except Exception as e:
            print(e)
        finally:
            cursor.close()
            conn.close()

api.add_resource(AddSpending, '/spending_up', methods=['POST']) # Route_3
When a user passes data through this endpoint, it should be stored in the database.
I think the problem is that you named your method add_spending when it should be named post: change def add_spending(self) to def post(self). Flask-RESTful dispatches each request to the Resource method named after the HTTP verb, so the code for your API should look like this, without the methods=['POST']:
class AddSpending(Resource):
    def post(self):
        conn = None
        try:
            _json = request.json
            _date = _json['date']
            _reason = _json['reason']
            _amount = _json['amount']
            # validate the received values
            if _date and _reason and _amount and request.method == 'POST':
                #do not save password as a plain text
                #_hashed_password = generate_password_hash(_password)
                # save edits; SQLite expects ? placeholders, not the %s/%d style
                sql = "INSERT INTO spending(date, reason, amount) VALUES(?, ?, ?)"
                data = (_date, _reason, _amount)
                conn = db_connect.connect()
                conn.execute(sql, data)
                #resp = jsonify('Spending added successfully!')
                #resp.status_code = 200
                return
            else:
                return 404
        except Exception as e:
            print(e)
        finally:
            if conn is not None:
                conn.close()
api.add_resource(AddSpending, '/spending_up') # Route_3
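A quick way to exercise the route, assuming the app is run locally with app.run() on Flask's default port (a sketch, not from the original post):

import requests

payload = {"date": "2019-08-01", "reason": "coffee", "amount": 4}
resp = requests.post("http://127.0.0.1:5000/spending_up", json=payload)
print(resp.status_code)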
UPDATE
I just tried with code similar to yours and it worked.
ANOTHER UPDATE
your repo code
Using the content of this xml example saved in a local file called test.xml, I'm trying to parse the contents and insert them into my database using the following code:
import cx_Oracle
import re
import os
import sys
import xml.etree.ElementTree as ET
from ConfigParser import SafeConfigParser

def db_callmany(cfgFile, sql, params):
    parser = SafeConfigParser()
    parser.read(cfgFile)
    dsn = parser.get('odbc', 'dsn')
    uid = parser.get('odbc', 'user')
    pwd = parser.get('odbc', 'pass')
    try:
        con = None
        con = cx_Oracle.connect(uid, pwd, dsn)
        cur = con.cursor()
        cur.executemany(sql, params)
        con.commit()
    except cx_Oracle.DatabaseError, e:
        print 'Error %s' % e
        sys.exit(1)
    finally:
        if con:
            con.close()

if __name__ == '__main__':
    try:
        cfgFile = 'c:\\tests\\dbInfo.cfg'
        tree = ET.parse('test.xml')
        root = tree.getroot()
        mdfList = []
        for book in root.findall('book'):
            author = book.find('genre').text
            title = book.find('price').text
            the = str((author, title))
            mdfList.append(the)
        sql = "INSERT INTO book_table ( GENRE, PRICE )"
        db_callmany(cfgFile, sql, mdfList)
    except KeyboardInterrupt:
        sys.stdout.write('\nInterrupted.\n')
But executing this code I receive the following error:
Error ORA-01036: illegal variable name/number
Exit code: 1
Not sure what I'm missing here to get this code working.
Your INSERT statement has no VALUES clause or bind placeholders, which is what triggers ORA-01036. cx_Oracle needs placeholders in the SQL, and with named binds executemany takes a sequence of dictionaries rather than a sequence of sequences, so something like:
mdfList = list()
for book in root.findall('book'):
    Values = dict()
    Values['GENRE'] = book.find('genre').text
    Values['PRICE'] = book.find('price').text
    mdfList.append(Values)

sql = "INSERT INTO book_table (GENRE, PRICE) VALUES (:GENRE, :PRICE)"
db_callmany(cfgFile, sql, mdfList)
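For what it's worth, cx_Oracle also accepts positional binds, in case you'd rather keep plain sequences; a sketch against the same table:

mdfList = []
for book in root.findall('book'):
    # keep (genre, price) as a plain tuple; :1 and :2 bind by position
    mdfList.append((book.find('genre').text, book.find('price').text))

sql = "INSERT INTO book_table (GENRE, PRICE) VALUES (:1, :2)"
db_callmany(cfgFile, sql, mdfList)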