This question already has answers here:
how to continue for loop after exception?
I have a script that scrapes a website every 7 seconds and sends a WhatsApp message if a condition is true. But if any error occurs while scraping, the script stops. How can I keep the script running even if an error occurs?
import mysql
import mysql.connector
from twilio.rest import Client
import requests
from bs4 import BeautifulSoup
import sched
import time
s = sched.scheduler(time.time, time.sleep)
account_sid = "xxx"
account_token = "xxx"
client = Client(account_sid, account_token)
from_whatsapp_number = "whatsapp:xxx"
to_ali = "whatsapp:xxx"
number = "+xxx"
to_me = "whatsapp:xxx"
tolist = [to_me,to_ali]
# , to_ali, to_yiho, to_eno, to_huso,to_ramo, to_yuno, tohuso1, tohuso2, tohuso3, tohuso4
url = "https://sports2.holiganbet{}.com/tr/spor/yuksek-oran/101/dunya/240/lokasyon".format(
659)
def fetch_data(sc):
URLtest = url
req = requests.get(URLtest)
soupi = BeautifulSoup(req.content, 'html.parser')
container = soupi.find('a', attrs={'class': 'Anchor NavList__Anchor'})
if not container:
print("No match")
elif container:
time.sleep(2)
mydb = mysql.connector.connect(
host="localhost",
user="root",
password="",
database="holi",)
mycursor = mydb.cursor()
URL = url
r = requests.get(URL)
soup = BeautifulSoup(r.content, 'html.parser')
# liste_elemanlari = soup.find_all('li', attrs={'class': 'NavList__Item'})
liste_elemanlari = soup.find('ul', attrs={'class': 'NavList'})
linkelementleri = liste_elemanlari.find_all(
'a', attrs={'class': 'Anchor NavList__Anchor'}, href=True)
for link_element in linkelementleri:
unique_id = link_element['href'].split("/")[-1]
macadi = link_element.text
mycursor.execute(
"SELECT macadi, COUNT(*) FROM maclar WHERE macadi = %s and title = %s GROUP BY macadi", (macadi, macadi))
myresult = mycursor.fetchall()
# gets the number of rows affected by the command executed
row_count = mycursor.rowcount
if row_count == 0:
for person in tolist:
client.messages.create(
body=link_element,
from_=from_whatsapp_number,
to=person
)
time.sleep(1)
sql = "INSERT INTO maclar (id, macadi,title) VALUES (%s, %s, %s)"
val = (unique_id, macadi, macadi)
mycursor.execute(sql, val)
mydb.commit()
if row_count > 0:
print("Maç Mevcut Mesaj Yok")
time.sleep(1)
sc.enter(10, 1, fetch_data, (sc,))
s.enter(10, 1, fetch_data, (s,))
s.run()
Add a try/except block and handle the error gracefully without re-raising it, so one failed scrape does not stop the script.
Exception handling:
try:
    # your logic
except Exception as e:
    time.sleep(2)  # or pass
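A minimal sketch of how that could look applied to the question's fetch_data (the timeout and raise_for_status() call are additions so network failures surface as exceptions here, and the finally block assumes the next run should always be scheduled):

def fetch_data(sc):
    try:
        req = requests.get(url, timeout=10)
        req.raise_for_status()
        # ... parse with BeautifulSoup, query MySQL, send the WhatsApp messages ...
    except Exception as e:
        # log and swallow the error instead of letting it kill the script
        print("fetch_data failed:", e)
        time.sleep(2)
    finally:
        # reschedule the next run whether or not this one succeeded
        sc.enter(10, 1, fetch_data, (sc,))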
Related
I'm writing a web scraping program to collect data from truecar.com. My database has 3 columns, and when I run the program I get this error: list index out of range.
Here is what I've done so far:
import mysql.connector
from bs4 import BeautifulSoup
import requests
import re
# take the car's name
requested_car_name = input()
# inject the car's name into the URL
my_request = requests.get('https://www.truecar.com/used-cars-for-sale/listings/' +
requested_car_name + '/location-holtsville-ny/?sort[]=best_match')
my_soup = BeautifulSoup(my_request.text, 'html.parser')
# ************ car_model column in database ******************
car_model = my_soup.find_all(
'span', attrs={'class': 'vehicle-header-make-model text-truncate'})
# we have a list of car models
car_list = []
for item in range(20):
    # appends car_model to car_list
    car_list.append(car_model[item].text)
car_string = ', '.join('?' * len(car_list))
# ************** price column in database *****************************
price = my_soup.find_all(
'div', attrs={'data-test': 'vehicleCardPricingBlockPrice'})
price_list = []
for item in range(20):
    # appends price to price_list
    price_list.append(price[item].text)
price_string = ', '.join('?' * len(price_list))
# ************** distance column in database ***************************
distance = my_soup.find_all('div', attrs={'data-test': 'vehicleMileage'})
distance_list = []
for item in range(20):
    # appends distance to distance_list
    distance_list.append(distance[item].text)
distance_string = ', '.join('?' * len(distance_list))
# check the connection
print('CONNECTING ...')
mydb = mysql.connector.connect(
    host="xxxxx",
    user="xxxxxx",
    password="xxxxxx",
    port='xxxxxx',
    database='xxxxxx'
)
print('CONNECTED')
# checking the connection is done
my_cursor = mydb.cursor(buffered=True)
insert_command = 'INSERT INTO car_name (car_model, price, distance) VALUES (%s, %s, %s);' % (car_string, price_string, distance_string)
# values = (car_string, price_string, distance_string)
my_cursor.execute(insert_command, car_list, price_list, distance_list)
mydb.commit()
print(my_cursor.rowcount, "Record Inserted")
mydb.close()
I also have another problem: I can't insert a list into my columns, and although I have tried many ways I haven't been able to get it working.
I think the problem is in these lines:
IndexError Traceback (most recent call last)
<ipython-input-1-4a3930bf0f57> in <module>
23 for item in range(20):
24 # appends car_model to car_list
---> 25 car_list.append(car_model[item].text)
26
27 car_string = ', '.join('?' * len(car_list))
IndexError: list index out of range
I don't want it to insert the whole list into one row in the database. I want the first 20 cars' price, model, and mileage from truecar.com in my database.
Yes, you are hard-coding the length. Change how you iterate through your soup elements. So:
import mysql.connector
from bs4 import BeautifulSoup
import requests
# take the car's name
requested_car_name = input('Enter car name: ')
# inject the car's name into the URL
my_request = requests.get('https://www.truecar.com/used-cars-for-sale/listings/' +
requested_car_name + '/location-holtsville-ny/?sort[]=best_match')
my_soup = BeautifulSoup(my_request.text, 'html.parser')
# ************ car_model column in database ******************
car_model = my_soup.find_all(
'span', attrs={'class': 'vehicle-header-make-model text-truncate'})
# we have a list of car models
car_list = []
for item in car_model:
    # appends car_model to car_list
    car_list.append(item.text)
# ************** price column in database *****************************
price = my_soup.find_all(
'div', attrs={'data-test': 'vehicleCardPricingBlockPrice'})
price_list = []
for item in price:
    # appends price to price_list
    price_list.append(item.text)
# ************** distance column in database ***************************
distance = my_soup.find_all('div', attrs={'data-test': 'vehicleMileage'})
distance_list = []
for item in distance:
    # appends distance to distance_list
    distance_list.append(item.text)
# check the connection
print('CONNECTING ...')
mydb = mysql.connector.connect(
    host="xxxxx",
    user="xxxxxx",
    password="xxxxxx",
    port='xxxxxx',
    database='xxxxxx'
)
print('CONNECTED')
# checking the connection is done
my_cursor = mydb.cursor(buffered=True)
insert_command = 'INSERT INTO car_name (car_model, price, distance) VALUES (%s, %s, %s)'
values = list(zip(car_list, price_list, distance_list))
my_cursor.executemany(insert_command, values)
mydb.commit()
print(my_cursor.rowcount, "Record Inserted")
mydb.close()
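One caveat worth adding (not part of the original answer): zip pairs elements positionally and stops at the shortest list, so if one of the three find_all calls returns fewer elements than the others, rows are silently dropped rather than raising an error. A small sanity check before the insert, as a sketch:

# all three scraped lists should line up one-to-one
if not (len(car_list) == len(price_list) == len(distance_list)):
    print("Warning: scraped lists differ in length:",
          len(car_list), len(price_list), len(distance_list))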
ALTERNATE:
There's also the API where you can fetch the data:
import mysql.connector
import requests
import math
# take the car's name
requested_car_name = input('Enter car name: ')
# inject the car's name into the URL
url = 'https://www.truecar.com/abp/api/vehicles/used/listings'
payload = {
    'city': 'holtsville',
    'collapse': 'true',
    'fallback': 'true',
    'include_incentives': 'true',
    'include_targeted_incentives': 'true',
    'make_slug': requested_car_name,
    'new_or_used': 'u',
    'per_page': '30',
    'postal_code': '',
    'search_event': 'true',
    'sort[]': 'best_match',
    'sponsored': 'true',
    'state': 'ny',
    'page': '1'}
jsonData = requests.get(url, params=payload).json()
total = jsonData['total']
total_pages = math.ceil(total/30)
total_pages_input = input('There are %s pages to iterate.\nEnter the number of pages to go through or type ALL: ' %total_pages)
if total_pages_input.upper() == 'ALL':
    total_pages = total_pages
else:
    total_pages = int(total_pages_input)
values = []
for page in range(1, total_pages+1):
    if page == 1:
        car_listings = jsonData['listings']
    else:
        payload.update({'page': '%s' % page})
        jsonData = requests.get(url, params=payload).json()
        car_listings = jsonData['listings']
    for listing in car_listings:
        vehicle = listing['vehicle']
        ex_color = vehicle['exterior_color']
        in_color = vehicle['interior_color']
        location = vehicle['location']
        price = vehicle['list_price']
        make = vehicle['make']
        model = vehicle['model']
        mileage = vehicle['mileage']
        style = vehicle['style']
        year = vehicle['year']
        engine = vehicle['engine']
        accidentCount = vehicle['condition_history']['accidentCount']
        ownerCount = vehicle['condition_history']['ownerCount']
        isCleanTitle = vehicle['condition_history']['titleInfo']['isCleanTitle']
        isFrameDamaged = vehicle['condition_history']['titleInfo']['isFrameDamaged']
        isLemon = vehicle['condition_history']['titleInfo']['isLemon']
        isSalvage = vehicle['condition_history']['titleInfo']['isSalvage']
        isTheftRecovered = vehicle['condition_history']['titleInfo']['isTheftRecovered']
        values.append((ex_color, in_color, location, price, make, model, mileage,
                       style, year, engine, accidentCount, ownerCount, isCleanTitle, isFrameDamaged,
                       isLemon, isSalvage, isTheftRecovered))
    print('Completed: Page %s of %s' % (page, total_pages))
# check the connection
print('CONNECTING ...')
mydb = mysql.connector.connect(
    host="xxxxx",
    user="xxxxxx",
    password="xxxxxx",
    port='xxxxxx',
    database='xxxxxx'
)
print('CONNECTED')
# checking the connection is done
my_cursor = mydb.cursor(buffered=True)
# create_command = ''' create table car_information (exterior_color varchar(255), interior_color varchar(255),location varchar(255),price varchar(255),make varchar(255),model varchar(255),mileage varchar(255),
# style varchar(255),year varchar(255),engine varchar(255),accidentCount varchar(255),ownerCount varchar(255),isCleanTitle varchar(255),isFrameDamaged varchar(255),
# isLemon varchar(255), isSalvage varchar(255),isTheftRecovered varchar(255))'''
# my_cursor.execute(create_command)
# print('created')
insert_command = '''INSERT INTO car_name (exterior_color, interior_color,location,price,make,model,mileage,
style,year,engine,accidentCount,ownerCount,isCleanTitle,isFrameDamaged,
isLemon, isSalvage,isTheftRecovered) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'''
my_cursor.executemany(insert_command, values)
mydb.commit()
print(my_cursor.rowcount, "Record Inserted")
mydb.close()
The problem seems to be that the list of car models has fewer than 20 entries.
for item in range(20):
    car_list.append(car_model[item].text)
This always tries to append exactly 20 items to the car list. If you have fewer than 20 entries, there is an error, because indexes past the end of car_model do not exist when there are, say, only 10 entries. You can try:
for item in range(len(car_model)):
    car_list.append(car_model[item].text)
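Equivalently, and a bit more idiomatic in Python, iterate over the elements themselves rather than over indexes:

for model in car_model:
    car_list.append(model.text)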
I am trying to get all the PC cores to work simultaneously while filling a PostgreSQL database. I have edited the code down to a reproducible example of the error I am getting:
Traceback (most recent call last):
File "test2.py", line 50, in <module>
download_all_sites(sites)
File "test2.py", line 36, in download_all_sites
pool.map(download_site, sites)
File "/usr/lib/python3.8/multiprocessing/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/usr/lib/python3.8/multiprocessing/pool.py", line 771, in get
raise self._value
psycopg2.OperationalError: SSL error: decryption failed or bad record mac
The full code that produces the error:
import requests
import multiprocessing
import time
import os
import psycopg2
session = None
conn = psycopg2.connect(user="user",
                        password="pass123",
                        host="127.0.0.1",
                        port="5432",
                        database="my_db")
cursor = conn.cursor()


def set_global_session():
    global session
    if not session:
        session = requests.Session()


def download_site(domain):
    url = "http://" + domain
    with session.get(url) as response:
        temp = response.text.lower()
        found = [i for i in keywords if i in temp]
        query = """INSERT INTO test (domain, keyword) VALUES (%s, %s)"""
        cursor.execute(query, (domain, found))


def download_all_sites(sites):
    with multiprocessing.Pool(processes=os.cpu_count(), initializer=set_global_session) as pool:
        pool.map(download_site, sites)


if __name__ == "__main__":
    sites = ['google.com'] * 10
    keywords = ['google', 'success']
    start_time = time.time()
    download_all_sites(sites)
    duration = time.time() - start_time
    conn.commit()
    print(f"Finished {len(sites)} in {duration} seconds")
Create a new Postgres connection in each worker process. libpq connections shouldn't be used across forked processes (which is what multiprocessing is doing); this is mentioned in the second warning box in the Postgres docs.
import requests
import multiprocessing
import time
import os
import psycopg2
session = None
def set_global_session():
    global session
    if not session:
        session = requests.Session()


def download_site(domain):
    url = "http://" + domain
    with session.get(url) as response:
        # temp = response.text.lower()
        # found = [i for i in keywords if i in temp]
        # query = """INSERT INTO test (domain, keyword) VALUES (%s, %s)"""
        conn = psycopg2.connect(
            "dbname=mf port=5959 host=localhost user=mf_usr"
        )
        cursor = conn.cursor()
        query = """INSERT INTO mytable (name) VALUES (%s)"""
        cursor.execute(query, (domain, ))
        conn.commit()
        conn.close()


def download_all_sites(sites):
    with multiprocessing.Pool(
        processes=os.cpu_count(), initializer=set_global_session
    ) as pool:
        pool.map(download_site, sites)


if __name__ == "__main__":
    sites = ['google.com'] * 10
    keywords = ['google', 'success']
    start_time = time.time()
    download_all_sites(sites)
    duration = time.time() - start_time
    print(f"Finished {len(sites)} in {duration} seconds")

    # make sure it worked!
    conn = psycopg2.connect("dbname=mf port=5959 host=localhost user=mf_usr")
    cursor = conn.cursor()
    cursor.execute('select count(name) from mytable')
    print(cursor.fetchall())  # verify 10 downloads == 10 records in database
Out:
Finished 10 in 0.9922008514404297 seconds
[(10,)]
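An alternative sketch, not from the answer above: open one connection per worker process in the Pool initializer and reuse it for every URL that worker handles, instead of connecting per insert. It assumes the same mytable schema and connection string as the answer:

import multiprocessing
import os

import psycopg2
import requests

session = None
conn = None


def init_worker():
    # one HTTP session and one Postgres connection per worker process
    global session, conn
    session = requests.Session()
    conn = psycopg2.connect("dbname=mf port=5959 host=localhost user=mf_usr")


def download_site(domain):
    with session.get("http://" + domain) as response:
        response.raise_for_status()
    with conn.cursor() as cursor:
        cursor.execute("INSERT INTO mytable (name) VALUES (%s)", (domain,))
    conn.commit()


def download_all_sites(sites):
    with multiprocessing.Pool(processes=os.cpu_count(), initializer=init_worker) as pool:
        pool.map(download_site, sites)

This keeps the rule from the answer intact: every connection is created after the fork, inside the process that uses it.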
I am trying to add data to my SQLite3 table from a function that takes two arguments to find a city and a neighbourhood: def scrapecafes(city, area). Strangely, this works well with some of the arguments I enter but not with others. For example, if I run scrapecafes(melbourne, thornbury) the code works fine, but if I run scrapecafes(melbourne, carlton) I get the following error: UnboundLocalError: local variable 'lat' referenced before assignment
I know the function definitely works, but I can't figure out why I am getting the UnboundLocalError for some arguments but not for others. Here is the code:
import folium
from bs4 import BeautifulSoup
import requests
from requests import get
import sqlite3
import geopandas
import geopy
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter
#cafeNames
def scrapecafes(city, area):
    #url = 'https://www.broadsheet.com.au/melbourne/guides/best-cafes-thornbury' #go to the website
    url = f"https://www.broadsheet.com.au/{city}/guides/best-cafes-{area}"
    response = requests.get(url, timeout=5)
    soup_cafe_names = BeautifulSoup(response.content, "html.parser")
    type(soup_cafe_names)
    cafeNames = soup_cafe_names.findAll('h2', attrs={"class": "venue-title", })  #scrape the elements
    cafeNamesClean = [cafe.text.strip() for cafe in cafeNames]  #clean the elements
    #cafeNameTuple = [(cafe,) for cafe in cafeNamesCleans
    #print(cafeNamesClean)

    #addresses
    soup_cafe_addresses = BeautifulSoup(response.content, "html.parser")
    type(soup_cafe_addresses)
    cafeAddresses = soup_cafe_addresses.findAll(attrs={"class": "address-content"})
    cafeAddressesClean = [address.text for address in cafeAddresses]
    #cafeAddressesTuple = [(address,) for address in cafeAddressesClean]
    #print(cafeAddressesClean)

    ##geocode addresses
    locator = Nominatim(user_agent="myGeocoder")
    geocode = RateLimiter(locator.geocode, min_delay_seconds=1)
    try:
        location = []
        for item in cafeAddressesClean:
            location.append(locator.geocode(item))
        lat = [loc.latitude for loc in location]
        long = [loc.longitude for loc in location]
    except:
        pass

    #zip up for table
    fortable = list(zip(cafeNamesClean, cafeAddressesClean, lat, long))
    print(fortable)

    ##connect to database
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Database created and Successfully Connected to 25july_database")
        sqlite_select_Query = "select sqlite_version();"
        cursor.execute(sqlite_select_Query)
        record = cursor.fetchall()
        print("SQLite Database Version is: ", record)
        cursor.close()
    except sqlite3.Error as error:
        print("Error while connecting to sqlite", error)

    #create table
    try:
        sqlite_create_table_query = ''' CREATE TABLE IF NOT EXISTS test555 (
                                name TEXT NOT NULL,
                                address TEXT NOT NULL,
                                latitude FLOAT NOT NULL,
                                longitude FLOAT NOT NULL
                                );'''
        cursor = sqliteConnection.cursor()
        print("Successfully Connected to SQLite")
        cursor.execute(sqlite_create_table_query)
        sqliteConnection.commit()
        print("SQLite table created")
    except sqlite3.Error as error:
        print("Error while creating a sqlite table", error)

    ##enter data into table
    try:
        sqlite_insert_name_param = """INSERT INTO test555
                                      (name, address, latitude, longitude)
                                      VALUES (?,?,?,?);"""
        cursor.executemany(sqlite_insert_name_param, fortable)
        sqliteConnection.commit()
        print("Total", cursor.rowcount, "Records inserted successfully into table")
        sqliteConnection.commit()
        cursor.close()
    except sqlite3.Error as error:
        print("Failed to insert data into sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")
The problem is that geopy doesn't have coordinates for Carlton. Hence, you should change your table schema and insert NULL in those cases.
When geopy doesn't have data it returns None, and when you try to access an attribute on None it throws an exception. You have to put the try/except block inside the for loop.
from bs4 import BeautifulSoup
import requests
from requests import get
import sqlite3
import geopandas
import geopy
from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter
#cafeNames
def scrapecafes(city, area):
    #url = 'https://www.broadsheet.com.au/melbourne/guides/best-cafes-thornbury' #go to the website
    url = f"https://www.broadsheet.com.au/{city}/guides/best-cafes-{area}"
    response = requests.get(url, timeout=5)
    soup_cafe_names = BeautifulSoup(response.content, "html.parser")
    cafeNames = soup_cafe_names.findAll('h2', attrs={"class": "venue-title", })  #scrape the elements
    cafeNamesClean = [cafe.text.strip() for cafe in cafeNames]  #clean the elements
    #cafeNameTuple = [(cafe,) for cafe in cafeNamesCleans

    #addresses
    soup_cafe_addresses = BeautifulSoup(response.content, "html.parser")
    cafeAddresses = soup_cafe_addresses.findAll(attrs={"class": "address-content"})
    cafeAddressesClean = [address.text for address in cafeAddresses]
    #cafeAddressesTuple = [(address,) for address in cafeAddressesClean]

    ##geocode addresses
    locator = Nominatim(user_agent="myGeocoder")
    geocode = RateLimiter(locator.geocode, min_delay_seconds=1)
    lat = []
    long = []
    for item in cafeAddressesClean:
        try:
            location = locator.geocode(item.strip().replace(',', ''))
            lat.append(location.latitude)
            long.append(location.longitude)
        except:
            lat.append(None)
            long.append(None)

    #zip up for table
    fortable = list(zip(cafeNamesClean, cafeAddressesClean, lat, long))
    print(fortable)

    ##connect to database
    try:
        sqliteConnection = sqlite3.connect('25july_database.db')
        cursor = sqliteConnection.cursor()
        print("Database created and Successfully Connected to 25july_database")
        sqlite_select_Query = "select sqlite_version();"
        cursor.execute(sqlite_select_Query)
        record = cursor.fetchall()
        print("SQLite Database Version is: ", record)
        cursor.close()
    except sqlite3.Error as error:
        print("Error while connecting to sqlite", error)

    #create table
    try:
        sqlite_create_table_query = ''' CREATE TABLE IF NOT EXISTS test (
                                name TEXT NOT NULL,
                                address TEXT NOT NULL,
                                latitude FLOAT,
                                longitude FLOAT
                                );'''
        cursor = sqliteConnection.cursor()
        print("Successfully Connected to SQLite")
        cursor.execute(sqlite_create_table_query)
        sqliteConnection.commit()
        print("SQLite table created")
    except sqlite3.Error as error:
        print("Error while creating a sqlite table", error)

    ##enter data into table
    try:
        sqlite_insert_name_param = """INSERT INTO test
                                      (name, address, latitude, longitude)
                                      VALUES (?,?,?,?);"""
        cursor.executemany(sqlite_insert_name_param, fortable)
        sqliteConnection.commit()
        print("Total", cursor.rowcount, "Records inserted successfully into table")
        sqliteConnection.commit()
        cursor.close()
    except sqlite3.Error as error:
        print("Failed to insert data into sqlite table", error)
    finally:
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")
scrapecafes('melbourne', 'carlton')
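One small aside on both versions: the RateLimiter wrapper geocode is created but never called, since the loop uses locator.geocode directly. Calling the wrapper instead keeps the one-second delay between Nominatim requests, e.g.:

geocode = RateLimiter(locator.geocode, min_delay_seconds=1)
for item in cafeAddressesClean:
    try:
        location = geocode(item.strip().replace(',', ''))  # rate-limited lookup
        lat.append(location.latitude)
        long.append(location.longitude)
    except Exception:
        # geocode returned None or failed; keep the row with NULL coordinates
        lat.append(None)
        long.append(None)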
I'm getting an IndexError: list index out of range error. I'm new to Python, a complete beginner, and would appreciate some help understanding what's wrong.
I'm getting JSON from a timesheet API which I need to save to a MySQL database.
import requests
import urllib2
from urllib2 import urlopen
import json
import mysql.connector
site = 'https://api.site.com/Projects/?version=5'
hdr = {'Authorization': 'WRAP access_token="TOKEN"', 'Accept': 'application/json'}
req = urllib2.Request(site, headers=hdr)
try:
    page = urllib2.urlopen(req)
except urllib2.HTTPError, e:
    print e.fp.read()
response = urllib2.urlopen(req).read()
json_obj = json.loads(response.decode ('utf8'))
conn = mysql.connector.connect(host="HOST_IP",
                               user="USER",
                               passwd="PASS",
                               db="DB",
                               port=3306,
                               collation="utf8mb4_unicode_ci")
cursor = conn.cursor ()
for Project in json_obj["Projects"]:
    cursor.execute("""
        INSERT INTO project
        (ID, Client_Name, Name, BusSector, ProjCat, SageCode)
        VALUES
        (%s, %s, %s, %s, %s, %s)
        ON DUPLICATE KEY UPDATE
        Name = VALUES(Client_Name),
        Name = VALUES(Name),
        Name = VALUES(BusSector),
        Name = VALUES(ProjCat),
        Name = VALUES(SageCode);
        """, (Project["ID"], Project["Client"]["Name"], Project["Name"], Project["CustomFields"][0]["Values"][0], Project["CustomFields"][2]["Values"][0], Project["CustomFields"][1]["Values"][0])
    )
conn.commit()
cursor.close()
conn.close()
The output from the traceback is:
line 52, in <module> """,(Project["ID"],Project["Client"]["Name"],Project["Name"],Project["CustomFields"][0]["Values"][0],Project["CustomFields"][2]["Values"][0],Project["CustomFields"][1]["Values"][0])
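An IndexError on that line usually means one of the indexed lookups came up empty for a particular project, e.g. a CustomFields list with fewer than three entries or an empty Values list. A hedged sketch (the field layout is an assumption based on the code above) that skips and reports such projects instead of crashing:

for Project in json_obj["Projects"]:
    try:
        params = (Project["ID"],
                  Project["Client"]["Name"],
                  Project["Name"],
                  Project["CustomFields"][0]["Values"][0],
                  Project["CustomFields"][2]["Values"][0],
                  Project["CustomFields"][1]["Values"][0])
    except (IndexError, KeyError) as e:
        print("Skipping project %s: %s" % (Project.get("ID"), e))
        continue
    cursor.execute(insert_sql, params)  # insert_sql: the INSERT ... ON DUPLICATE KEY UPDATE statement above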
I have crawled a webpage in order to extract certain information like price, header, and so on.
Now my goal is to insert the information into a database. I have already set up the database with the respective fields that are needed.
That is my code:
def trade_spider(max_pages):
    Language = "Japanese"
    partner = La
    location = Tokyo
    already_printed = set()
    for reg in Region:
        count = 0
        count1 = 0
        page = -1
        while page <= max_pages:
            page += 1
            response = urllib.request.urlopen("http://www.jsox.de/s/search.json?q=" + str(reg) + "&page=" + str(page))
            jsondata = json.loads(response.read().decode("utf-8"))
            format = (jsondata['activities'])
            g_data = format.strip("'<>()[]\"` ").replace('\'', '\"')
            soup = BeautifulSoup(g_data)
            articles = soup.find_all("article", {"class": "activity-card activity-card-horizontal "})
            try:
                connection = mysql.connector.connect(host="localhost", user="root", passwd="", db="crawl")
            except:
                print("No connection to Server")
                sys.exit(0)
            cursor = connection.cursor()
            cursor.execute("DELETE from prices_crawled where Location=" + str(location) + " and Partner=" + str(partner))
            connection.commit()
            for article in articles:
                headers = article.find_all("h3", {"class": "activity"})
                for header in headers:
                    header_initial = header.text.strip()
                    if header_initial not in already_printed:
                        already_printed.add(header_initial)
                        header_final = header_initial
                prices = article.find_all("span", {"class": "price"})
                for price in prices:
                    price_end = price.text.strip().replace(",", "")[2:]
                    count1 += 1
                    if count1 > count:
                        pass
                    else:
                        price_final = price_end
                deeplinks = article.find_all("a", {"class": "activity-card"})
                for t in set(t.get("href") for t in deeplinks):
                    deeplink_initial = t
                    if deeplink_initial not in already_printed:
                        already_printed.add(deeplink_initial)
                        deeplink_final = deeplink_initial
                        cursor.execute('''INSERT INTO prices_crawled (price_id, Header, Price, Deeplink, Partner, Location, Language) \
                            VALUES(%s, %s, %s, %s, %s, %s, %s)''', ['None'] + [header_final] + [price_final] + [deeplink_final] + [partner] + [location] + [Language])
                        connection.commit()
    cursor.close()
    connection.close()
trade_spider(int(Spider))
The issue is that the information does not get into the database. Furthermore, I do not get any error message, so I do not know what I am doing wrong.
Could you help me out? Any feedback is appreciated.
Is the delete statement working?
I think the problem is the way you pass your variables.
Change your syntax to something like this:
sql_insert_tx = "INSERT INTO euro_currencies (pk,currency,rate,date) values (null,'USD','%s','%s')" % (usd,date)
cursor.execute(sql_insert_tx)
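Building on that, string interpolation leaves the quoting up to you and is injection-prone; for the question's prices_crawled table, a sketch of the same delete and insert using the connector's parameter binding (column names copied from the question, treated as assumptions about the schema):

cursor.execute(
    "DELETE FROM prices_crawled WHERE Location = %s AND Partner = %s",
    (location, partner))

cursor.execute(
    "INSERT INTO prices_crawled "
    "(price_id, Header, Price, Deeplink, Partner, Location, Language) "
    "VALUES (%s, %s, %s, %s, %s, %s, %s)",
    (None, header_final, price_final, deeplink_final, partner, location, Language))
connection.commit()  # passing None assumes price_id is auto-increment or nullable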