MySQL: I don't understand why this is happening? - python

import sqlite3
import traceback
from time import sleep
from datetime import datetime

import mysql.connector
import tzlocal
from pyrogram import Client  # assumption: Client comes from pyrogram, which matches start()/get_users()/stop()


def check_user(user_id):
    conn = mysql.connector.connect(host='localhost', database='online', user='root1', password='rootRRR111_')
    cur = conn.cursor()
    cur.execute('CREATE TABLE IF NOT EXISTS online(id INT, last_online_date TEXT)')
    conn.commit()
    select = "SELECT * FROM online WHERE id = %s LIMIT 0, 1"
    result = cur.execute(select, (user_id,))
    if result is None:
        insert = ('INSERT INTO online (id, last_online_date) VALUES (%s, %s)')
        cur.reset()
        cur.execute(insert, (user_id, online_time))
        conn.commit()


def update_online_status(user_id, online_time):
    conn = mysql.connector.connect(host='localhost', database='online', user='root1', password='rootRRR111_')
    cursor = conn.cursor()
    select = 'SELECT last_online_date FROM online WHERE id = %s'
    result = cursor.execute(select, (user_id,))
    old_online = result
    online_time = f'{old_online},{online_time}'
    cursor.reset()
    cursor.execute('UPDATE online SET last_online_date = %s WHERE id = %s', (online_time, user_id))
    conn.commit()


app = Client("my_account")
app.start()
while True:
    try:
        with open('ids.ini', 'r') as file:
            users = file.read().splitlines()
        for user in users:
            result = app.get_users(user)
            user_id = result['id']
            if result['status'] == 'offline':
                unix_timestamp = float(result['last_online_date'])
                local_timezone = tzlocal.get_localzone()
                local_time = datetime.fromtimestamp(unix_timestamp, local_timezone)
                online_time = local_time.strftime("%Y/%m/%d %H:%M:%S")
            elif result['status'] == 'online':
                now = datetime.now()
                online_time = now.strftime("%Y/%m/%d %H:%M:%S")
            check_user(user_id)
            update_online_status(user_id, online_time)
        # sleep(300)
    except Exception:
        traceback.print_exc()
        continue
app.stop()
I am writing a program that records the online status of a user in Telegram.
Instead of updating the row for an existing user, a huge number of identical rows appear in the database.
Example: table with repeated rows.
Whenever I try to fix something, I get a lot of errors, for example:
mysql.connector.errors.ProgrammingError: Not all parameters were used in the SQL statement
mysql.connector.errors.InternalError: Unread result found
and others.
Please help!
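A likely explanation, judging from the code above: with mysql.connector, cursor.execute() always returns None, so "if result is None:" is true for every user and an INSERT runs on every pass; the unfetched SELECT is also what produces the "unread result found" error. Below is a minimal sketch of the check-then-insert/update pattern with the result fetched explicitly; table and column names are taken from the question, and the credentials are placeholders.

import mysql.connector

def upsert_online_status(user_id, online_time):
    # Placeholder credentials; substitute your own connection settings.
    conn = mysql.connector.connect(host='localhost', database='online',
                                   user='root1', password='rootRRR111_')
    cur = conn.cursor()
    cur.execute('CREATE TABLE IF NOT EXISTS online(id INT, last_online_date TEXT)')

    # execute() returns None; the selected row has to be fetched explicitly.
    cur.execute('SELECT last_online_date FROM online WHERE id = %s LIMIT 1', (user_id,))
    row = cur.fetchone()  # consumes the result and avoids "unread result found"

    if row is None:
        cur.execute('INSERT INTO online (id, last_online_date) VALUES (%s, %s)',
                    (user_id, online_time))
    else:
        cur.execute('UPDATE online SET last_online_date = %s WHERE id = %s',
                    (online_time, user_id))

    conn.commit()
    cur.close()
    conn.close()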

Related

I'm trying to insert data that I've scraped off Twitter into my Postgres database but failing

I'm using the snscrape library to scrape Twitter data. I want to insert this data into my database, but I seem to fail no matter what method I try. When I use a loop and build an SQL query after the loop to insert the values one by one, I get an IndexError and a TypeError. When I try to append the data to a list, I can't loop through each value one by one. Now I'm stuck and don't know what to do.
Method 1:
import psycopg2
import snscrape.modules.twitter as sntwitter


class Tweet_list():
    def tweets_list1(self):
        dbname = '******'
        user = '******'
        password = '******'
        host = '*******'
        port = ****
        cur = None
        conn = None
        try:
            conn = psycopg2.connect(
                dbname=dbname,
                user=user,
                password=password,
                host=host,
                port=port
            )
            cur = conn.cursor()
            cur.execute('DROP TABLE IF EXISTS Machine_twitter')
            create_table = '''CREATE TABLE IF NOT EXISTS Machine_twitter (
                id int PRIMARY KEY,
                Tweet text,
                Tweet_id int,
                Timestamp timestamp,
                Replys int,
                Retweets int,
                Likes int,
                Username char)'''
            cur.execute(create_table)
            for i, tweet in enumerate(sntwitter.TwitterSearchScraper('from:TheHoopCentral').get_items()):
                if i > 5:
                    break
                insert_tweet = 'INSERT INTO Machine_twitter (Tweet, Tweet_id, Timestamp, Replys, Retweets, Likes, Username) VALUES (%s, %s, %s, %s, %s, %s, %s)'
                insert_values = (tweet.content, tweet.id, tweet.date, tweet.replyCount, tweet.retweetCount, tweet.likeCount, tweet.user.username)
                cur.execute(insert_tweet, insert_values)
                conn.commit()
                print('completed')
        except Exception as error:
            print(error)
        finally:
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()


tweets = Tweet_list()
tweets2 = Tweet_list()
tweets2.tweets_list1()
Error:
IndexError: list index out of range
Method 2:
def update_list1(self):
    tweets_list2 = []
    for i, tweet in enumerate(sntwitter.TwitterSearchScraper('from:TheHoopCentral').get_items()):
        if i > 100:
            break
        tweets_list2.append([tweet.content, tweet.id, tweet.likeCount, tweet.retweetCount, tweet.replyCount, tweet.user.username])
    tweet_df = pd.DataFrame(tweets_list2, columns=('tweet', 'tweet id', 'likeCount', 'retweetCount', 'replyCount', 'username'))
    tweet_df.head()
The problem with the second method is that after the rows are appended to the list, I can't access the values (e.g. tweet.content) in order to insert them into the database. I've tried every method under the sun but I'm failing miserably. Can somebody help?
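If the rows are collected into a list first, they can still be written to the database afterwards: each appended entry is just a flat sequence of values, so the whole list can be passed to executemany. A minimal sketch under these assumptions: psycopg2 with placeholder credentials, the Machine_twitter table from method 1, and an id column that is auto-generated (e.g. SERIAL); if id must be supplied explicitly, add it to the column list and to each row.

import psycopg2
import snscrape.modules.twitter as sntwitter

# Collect the rows first, in the same shape as method 2.
tweets_list2 = []
for i, tweet in enumerate(sntwitter.TwitterSearchScraper('from:TheHoopCentral').get_items()):
    if i > 100:
        break
    tweets_list2.append([tweet.content, tweet.id, tweet.likeCount,
                         tweet.retweetCount, tweet.replyCount, tweet.user.username])

conn = psycopg2.connect(dbname='******', user='******', password='******',
                        host='******', port=5432)  # placeholder credentials
cur = conn.cursor()

insert_tweet = ('INSERT INTO Machine_twitter '
                '(Tweet, Tweet_id, Likes, Retweets, Replys, Username) '
                'VALUES (%s, %s, %s, %s, %s, %s)')

# Each collected row is already a flat sequence of values in this column order,
# so the whole list can be handed to executemany in one call.
cur.executemany(insert_tweet, tweets_list2)
conn.commit()

cur.close()
conn.close()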

Query fails to execute but returns no error - Python

My query returns no error but doesn't commit to my database.
import mysql.connector
from datetime import date

def ImportKey():
    testsite_array = []
    converted_list = []
    cnx = mysql.connector.connect(user='USER', password='PASSWORD', host='HOST', database='DATABASE')
    cursor = cnx.cursor()
    keyType = input("Valid types: Day, Month, Life:\n > ")
    if keyType == "Day":
        with open('Keys.txt') as my_file:
            for line in my_file:
                testsite_array.append(line)
        for element in testsite_array:
            converted_list.append(element.strip())
        sql_query = "INSERT INTO `DailyK`(`keyDaily`) VALUES (%s)"
        cursor.execute(sql_query, (converted_list[0], ))
        cnx.commit()
    cursor.close()
    cnx.close()
Why does this code not work? It returns no error and from what I can see there is nothing wrong with it. Help would be much appreciated.
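Two things are worth checking, as a guess from the code shown: nothing is inserted unless the input matches "Day" exactly (an extra space or different case silently skips the whole block), and even then only converted_list[0] is written. Below is a sketch that normalizes the input, inserts every key, and reports how many rows were written; the function, table, and file names are taken from the question, and the final call to ImportKey() is added only for illustration.

import mysql.connector

def ImportKey():
    cnx = mysql.connector.connect(user='USER', password='PASSWORD', host='HOST', database='DATABASE')
    cursor = cnx.cursor()
    keyType = input("Valid types: Day, Month, Life:\n > ").strip().lower()
    if keyType == "day":  # whitespace and case no longer matter
        with open('Keys.txt') as my_file:
            keys = [line.strip() for line in my_file if line.strip()]
        sql_query = "INSERT INTO `DailyK` (`keyDaily`) VALUES (%s)"
        cursor.executemany(sql_query, [(k,) for k in keys])  # insert every key, not just the first
        cnx.commit()
        print(cursor.rowcount, "row(s) inserted")  # confirm the write actually happened
    else:
        print("No insert performed for key type:", keyType)
    cursor.close()
    cnx.close()

ImportKey()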

SQLite3: Error opening the database: Incorrect number of bindings supplied. The current statement uses 1, and there are 4 supplied

I've already tried adding a comma after Name and the question mark in "VALUES", and I was getting a syntax error for my parentheses.
import sqlite3
from sqlite3 import Error
from flask import request, abort  # app is the Flask instance created elsewhere

@app.route("/Disease/new", methods=["POST"])
def addDisease():
    newDisease = {}
    conn = None
    try:
        jsonPostData = request.get_json()
        Name = jsonPostData["Name"]
        conn = sqlite3.connect("./dbs/ContactTracer.db")
        conn.row_factory = sqlite3.Row
        sql = """
            INSERT INTO Disease(Name) VALUES(?)
        """
        cursor = conn.cursor()
        cursor.execute(sql, (Name))
        conn.commit()
        sql = """
            SELECT Disease.ID, Disease.Name
            From Disease
            Where Disease.ID = ?
        """
        cursor.execute(sql, (cursor.lastrowid,))
        row = cursor.fetchone()
        newDisease["ID"] = row["ID"]
        newDisease["Name"] = row["Name"]
    except Error as e:
        print(f"Error opening the database{e}")
        abort(500)
    finally:
        if conn:
            conn.close()
    return newDisease
Make the parameter a one-element tuple (note the trailing comma) and check whether the INSERT succeeded:
cursor.execute(sql, (Name,))
...
if cursor.lastrowid:
    cursor.execute(sql, (cursor.lastrowid,))
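For what it's worth, the reason the trailing comma matters: sqlite3 treats the second argument of execute() as a sequence of parameters, and a bare string is a sequence of its characters, which is where "there are 4 supplied" comes from with a four-letter name. A tiny standalone illustration with an in-memory database and a hypothetical Demo table:

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE Demo (Name TEXT)")

name = "Flu"
# cur.execute("INSERT INTO Demo (Name) VALUES (?)", name)   # would raise: statement uses 1, 3 supplied
cur.execute("INSERT INTO Demo (Name) VALUES (?)", (name,))  # one-element tuple binds correctly
conn.commit()
print(cur.lastrowid)  # 1
conn.close()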

Updating results from a mysql-connector fetchall

I'm trying to select certain records from the civicrm_address table and update the geocode columns. I use fetchall to retrieve the rows and then, within the same loop, try to update them with the results of the geocoder API, passing the civicrm_address.id value into the update_sql statement.
The rowcount after the attempted update and commit is always -1, so I assume the update failed for some reason, but I have yet to figure out why.
import geocoder
import mysql.connector

mydb = mysql.connector.connect(
    [redacted]
)
mycursor = mydb.cursor(dictionary=True)
update_cursor = mydb.cursor()

sql = """
select
    a.id
    , street_address
    , city
    , abbreviation
from
    civicrm_address a
    , civicrm_state_province b
where
    location_type_id = 6
and
    a.state_province_id = b.id
and
    street_address is not null
and
    city is not null
limit 5
"""

mycursor.execute(sql)
rows = mycursor.fetchall()
print(mycursor.rowcount, "records selected")

for row in rows:
    address_id = int(row["id"])
    street_address = str(row["street_address"])
    city = str(row["city"])
    state = str(row["abbreviation"])
    myaddress = street_address + " " + city + ", " + state
    g = geocoder.arcgis(myaddress)
    d = g.json
    latitude = d["lat"]
    longitude = d["lng"]
    update_sql = """
    begin work;
    update
        civicrm_address
    set
        geo_code_1 = %s
        , geo_code_2 = %s
    where
        id = %s
    """
    var = (latitude, longitude, address_id)
    print(var)
    update_cursor.execute(update_sql, var, multi=True)
    mydb.commit()
    print(update_cursor.rowcount)

mycursor.close()
update_cursor.close()
mydb.close()
Here is a simpler script. I have executed the update_sql statement directly in MySQL Workbench and it succeeds, but it is not working from Python.
import geocoder
import mysql.connector

try:
    mydb = mysql.connector.connect(
        [redacted]
    )
    mycursor = mydb.cursor(dictionary=True)
    update_cursor = mydb.cursor()
    update_sql = """
    begin work;
    update
        civicrm_address
    set
        geo_code_1 = 37.3445
        , geo_code_2 = -118.5366074
    where
        id = 65450;
    """
    update_cursor.execute(update_sql, multi=True)
    mydb.commit()
    print(update_cursor.rowcount, "row(s) were updated")
except mysql.connector.Error as error:
    print("Failed to update record to database: {}".format(error))
    mydb.rollback()
finally:
    # closing database connection.
    if mydb.is_connected():
        mydb.close()
I have it working now. I first removed the "begin work" statement but kept multi=True, and it still didn't work. Once I also removed multi=True, it worked.
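For reference, a sketch of that working shape: a single parameterized UPDATE per call, no "begin work" (commit() ends the implicit transaction that mysql.connector starts) and no multi=True. The connection details are placeholders; the table, column names, and sample values are taken from the question.

import mysql.connector

mydb = mysql.connector.connect(host="localhost", user="user",
                               password="password", database="civicrm")  # placeholders
update_cursor = mydb.cursor()

update_sql = """
    update civicrm_address
    set geo_code_1 = %s,
        geo_code_2 = %s
    where id = %s
"""

update_cursor.execute(update_sql, (37.3445, -118.5366074, 65450))
mydb.commit()  # ends the implicit transaction and makes the change visible
print(update_cursor.rowcount, "row(s) were updated")

update_cursor.close()
mydb.close()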

Python: error writing data into a MySQL table

I wrote a Python program to create a MySQL table and insert data into it. The program is as follows:
import re
import pymysql

def pre_data_db_manage(type, data):
    conn = pymysql.connect(host="localhost", port=3306, user="root", passwd="********", db="facebook_info", charset="utf8")
    cur = conn.cursor()
    if type == "pre_davi_group_members_data":
        is_exist_table_sql = "SHOW TABLES LIKE 'fb_pre_davi_group_members_posts'"
        if cur.execute(is_exist_table_sql) == 0:
            create_table_sql = '''CREATE TABLE fb_pre_davi_group_members_posts (id bigint not null primary key auto_increment,userID bigint,userName varchar(128),userURL varchar(256),
                postTime varchar(128),postText text,postTextLength int,likesCount int,sharesCount int,commentsCount int,postTextPolarity varchar(64),postTextSubjectivity varchar(64))'''
            cur.execute(create_table_sql)
    r = re.compile(r'^[a-zA-Z0-9]')
    for item in data:
        if "'" in item["PostText"]:
            item["PostText"] = item["PostText"].replace("'", " ")
        if "\\" in item["PostText"]:
            item["PostText"] = item["PostText"].replace("\\", "\\\\")
        for i in item["PostText"]:
            result = r.match(i)
            if result == None:
                print("in re")
                item['PostText'] = item['PostText'].replace(i, ' ')
        if "nan" in item["SharesCount"]:
            item["SharesCount"] = 0
        if "nan" in item["LikesCount"]:
            item["LikesCount"] = 0
        if "nan" in item["CommentsCount"]:
            item["CommentsCount"] = 0
        if "nan" in item["PostTextLength"]:
            item["PostTextLength"] = 0
        item["PostTextLength"] = int(item["PostTextLength"])
        item["LikesCount"] = int(item["LikesCount"])
        item["SharesCount"] = int(item["SharesCount"])
        item["CommentsCount"] = int(item["CommentsCount"])
        if type == "pre_davi_group_members_data":
            insert_sql = '''INSERT INTO fb_pre_davi_group_members_posts (userID,userName,userURL,
                postTime,postText,postTextLength,likesCount,sharesCount,commentsCount,postTextPolarity,postTextSubjectivity) VALUES
                ({0},"{1}",'{2}','{3}','{4}',{5},{6},{7},{8},{9},{10})'''.format(item["UserID"], item["UserName"], item["UserURL"], item["PostTime"], item["PostText"], item["PostTextLength"], item["LikesCount"], item["SharesCount"], item["CommentsCount"], item["PostTextPolarity"], item["PostTextSubjectivity"])
            print(insert_sql)
            try:
                cur.execute(insert_sql)
            except Exception as e:
                print("insert error")
                continue
    cur.close()
    conn.commit()
    conn.close()
and the calling code is as follows:
type = "pre_davi_group_members_data"
pre_data_db_manage(type, df_list)
However, when I execute this program, I find that no data has been inserted into the table fb_pre_davi_group_members_posts.
At the MySQL command line, I run:
select count(*) from fb_pre_davi_group_members_posts;
and the result is 0.
Could you please tell me the reason and how to solve it?
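Two things are worth looking at, as a guess from the code shown: the except block hides the real failure (only "insert error" is printed, never the exception itself), and building the INSERT with str.format breaks as soon as a post contains quotes or other special characters. Below is a sketch of the insert loop using pymysql parameter placeholders, which also prints the underlying exception so the real cause becomes visible; the connection settings and dictionary keys are taken from the question, and df_list is the list passed in the calling code above.

import pymysql

conn = pymysql.connect(host="localhost", port=3306, user="root",
                       passwd="********", db="facebook_info", charset="utf8")
cur = conn.cursor()

insert_sql = """INSERT INTO fb_pre_davi_group_members_posts
    (userID, userName, userURL, postTime, postText, postTextLength,
     likesCount, sharesCount, commentsCount, postTextPolarity, postTextSubjectivity)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""

for item in df_list:  # df_list as in the calling code above
    values = (item["UserID"], item["UserName"], item["UserURL"], item["PostTime"],
              item["PostText"], item["PostTextLength"], item["LikesCount"],
              item["SharesCount"], item["CommentsCount"],
              item["PostTextPolarity"], item["PostTextSubjectivity"])
    try:
        cur.execute(insert_sql, values)  # placeholders handle quoting and escaping
    except Exception as e:
        print("insert error:", e)  # show why a particular row fails

conn.commit()  # commit before closing, otherwise nothing is persisted
cur.close()
conn.close()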
