I am trying to get all the PC cores to work simultaneously while filling a PostgreSQL database; I have edited the code below into a minimal reproducible example of the error I am getting.
Traceback (most recent call last):
File "test2.py", line 50, in <module>
download_all_sites(sites)
File "test2.py", line 36, in download_all_sites
pool.map(download_site, sites)
File "/usr/lib/python3.8/multiprocessing/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/usr/lib/python3.8/multiprocessing/pool.py", line 771, in get
raise self._value
psycopg2.OperationalError: SSL error: decryption failed or bad record mac
The full code which makes the error
import requests
import multiprocessing
import time
import os
import psycopg2
# Shared requests session; populated per worker by set_global_session().
session = None

# NOTE(review): this connection (and the cursor below) is created in the
# PARENT process, before multiprocessing forks the workers.  Every forked
# child then shares the same libpq socket/SSL state, which is exactly what
# produces "SSL error: decryption failed or bad record mac" in the traceback.
conn = psycopg2.connect(user="user",
                        password="pass123",
                        host="127.0.0.1",
                        port="5432",
                        database="my_db")
cursor = conn.cursor()
def set_global_session():
    """Pool initializer: lazily build one requests.Session per worker."""
    global session
    if session is None:
        session = requests.Session()
def download_site(domain):
    # Fetch http://<domain> and record which keywords appear in the page.
    url = "http://" + domain
    with session.get(url) as response:
        temp = response.text.lower()
        # `keywords` is the module-level list defined in the __main__ guard;
        # forked workers inherit it.
        found = [i for i in keywords if i in temp]
        query = """INSERT INTO test (domain, keyword) VALUES (%s, %s)"""
        # NOTE(review): `cursor` belongs to the connection opened in the
        # parent process -- using it from forked workers is the source of the
        # OperationalError.  Also, `found` is a Python list bound to a single
        # %s placeholder; confirm the column type accepts that.
        cursor.execute(query, (domain, found))
def download_all_sites(sites):
    """Fan the downloads out over one worker process per CPU core."""
    worker_count = os.cpu_count()
    with multiprocessing.Pool(processes=worker_count,
                              initializer=set_global_session) as pool:
        pool.map(download_site, sites)
if __name__ == "__main__":
    # Ten copies of the same site keep the repro deterministic.
    sites = ['google.com'] * 10
    keywords = ['google', 'success']
    start_time = time.time()
    download_all_sites(sites)
    duration = time.time() - start_time
    # NOTE(review): committing on the parent's connection after the forked
    # workers used its cursor -- part of the same shared-connection bug.
    conn.commit()
    print(f"Finished {len(sites)} in {duration} seconds")
Create a new Postgres connection in each worker process. Libpq connections must not be used across forked processes (which is what multiprocessing does); this is mentioned in the second warning box in the PostgreSQL docs.
import requests
import multiprocessing
import time
import os
import psycopg2
# Per-worker requests session; filled in by the pool initializer below.
session = None


def set_global_session():
    """Initializer run inside each pool worker: create its own Session."""
    global session
    if session is None:
        session = requests.Session()
def download_site(domain):
    """Download http://<domain> and insert the domain name into Postgres.

    A fresh connection is opened (and always closed) inside the worker:
    libpq connections must never be shared across a fork, so each call
    owns its own connection instead of reusing one from the parent process.
    """
    url = "http://" + domain
    with session.get(url) as response:
        conn = psycopg2.connect(
            "dbname=mf port=5959 host=localhost user=mf_usr"
        )
        try:
            # `with conn` commits on success / rolls back on error;
            # `with conn.cursor()` closes the cursor deterministically.
            with conn:
                with conn.cursor() as cursor:
                    query = """INSERT INTO mytable (name) VALUES (%s)"""
                    cursor.execute(query, (domain, ))
        finally:
            # psycopg2's `with conn` does NOT close the connection.
            conn.close()
def download_all_sites(sites):
    """Run download_site over `sites`, one worker process per CPU core."""
    pool_options = {
        "processes": os.cpu_count(),
        "initializer": set_global_session,
    }
    with multiprocessing.Pool(**pool_options) as pool:
        pool.map(download_site, sites)
if __name__ == "__main__":
    sites = ['google.com'] * 10
    keywords = ['google', 'success']  # NOTE(review): unused in this version
    start_time = time.time()
    download_all_sites(sites)
    duration = time.time() - start_time
    print(f"Finished {len(sites)} in {duration} seconds")

    # make sure it worked!
    conn = psycopg2.connect("dbname=mf port=5959 host=localhost user=mf_usr")
    cursor = conn.cursor()
    cursor.execute('select count(name) from mytable')
    print(cursor.fetchall())  # verify 10 downloads == 10 records in database
Out:
Finished 10 in 0.9922008514404297 seconds
[(10,)]
Related
I'm programming a website like YouTube for my friend.
But I always get this error when i go to http://localhost:2389/watch.v=f4efc9de771d4aba85ee0a88bbce08b9
This is the server code:
# NOTE(review): as written this decorator line is a COMMENT -- it should be
# `@app.route(...)`.  With `#` the route is never registered with Flask.
#app.route('/watch.v=<VideoId>')
def watch(VideoId):
    # Render the player page; video.load_from_id() resolves the stored name
    # for the given id (see the database helper below).
    return render_template(
        "video.html",
        VideoName=video.load_from_id(VideoId),
        VideoId=VideoId
    )
and this is the database helper:
from sqlite3 import OperationalError
from qrcode import *
from pathlib import Path
import sqlite3
import os
import uuid
# Path to the SQLite file next to the script.
# NOTE(review): mixed escaping -- "\\api\database.db" only works because "\d"
# is not a recognised escape; prefer os.path.join or a raw string.
DEFAULT_PATH = (os.getcwd() + "\\api\database.db")
# Module-level connection/cursor are created on the importing thread.  Flask
# serves requests on other threads, which is what triggers "SQLite objects
# created in a thread can only be used in that same thread".
connection = sqlite3.connect(DEFAULT_PATH)
cursor = connection.cursor()
cur = cursor  # alias kept for callers that use the short name
def generateID():
    """Return a fresh 32-character lowercase-hex id (random UUID4)."""
    fresh = uuid.uuid4()
    return fresh.hex
class video:
    # Namespace-style class: methods use the MODULE-LEVEL connection/cursor,
    # so they only work from the thread that imported this module.

    def db():
        """Create the `video` table, or report that it already exists."""
        try:
            connection = sqlite3.connect(DEFAULT_PATH)
            cursor = connection.cursor()
            cursor.execute("CREATE TABLE video (name, videoID);")
            connection.commit()
            print("Creating Database in:", DEFAULT_PATH.upper())
            print("[+] Database successfull created!")
        except OperationalError:
            # CREATE TABLE raises OperationalError when the table exists.
            print("[*] Database allready exists!")

    def load_from_id(id):
        # NOTE(review): uses the module-level cursor -- this is the call that
        # fails with sqlite3.ProgrammingError when Flask invokes it from a
        # request thread.  The f-string also leaves the query open to SQL
        # injection; a parameterised "... WHERE videoID=?" would be safer.
        cursor.execute(f'SELECT name from video WHERE videoID="{id}"')
        v = cursor.fetchall()
        # Flatten str(list-of-tuples) down to a bare name string.
        return str(v).replace("[", "").replace("]", "").replace("'", "").replace("(", "").replace(")", "").strip(",")

    class new:
        def newVideo(name):
            """Insert a new (name, generated id) row and commit."""
            i = generateID()
            NewUserData = "INSERT INTO video (name, videoID) VALUES (?, ?)"
            cursor.execute(NewUserData, (name, i))
            connection.commit()
            print(f"[+] Video successfull ceated ({name}, {i}).")
if __name__ == "__main__":
    # Manual smoke test: look up a known id and print the stored name.
    #video.db()
    # NOTE(review): the commented call below does not match the actual API,
    # which is video.new.newVideo(name) -- confirm before re-enabling.
    #video.new.new("Test_01")
    n = video.load_from_id("f4efc9de771d4aba85ee0a88bbce08b9")
    print(n)
And this is the error:
Traceback (most recent call last):
File "C:\Users\admin\OneDrive\Desktop\youtube\server.py"
cursor.execute(f'SELECT name from video WHERE videoID="{id}"')
sqlite3.ProgrammingError: SQLite objects created in a thread can only be used in that same thread.
The object was created in thread id 15232 and this is thread id 13568.
I hope someone can help me.
You need to create the cursor in the same method that uses it, i.e. inside load_from_id(id).
Your code would look like
def load_from_id(id):
    # Create the cursor inside the handler so it lives on the calling
    # thread (SQLite objects cannot be shared across threads).
    # (Original had a duplicated `cursor = cursor =` assignment.)
    cursor = connection.cursor()
    # Parameterised query instead of an f-string: avoids SQL injection.
    cursor.execute('SELECT name from video WHERE videoID = ?', (id,))
    [...]
I found the problem.
I have to do this:
def load_from_id(id):
    """Look up a video name by id, creating the cursor on this thread."""
    try:
        cursor = connection.cursor()
        # Parameterised query instead of interpolating `id` into the SQL
        # (the original f-string was injectable).
        data = 'SELECT name from video WHERE videoID = ?'
        cursor.execute(data, (id,))
        v = cursor.fetchall()
        return str(v).replace("[", "").replace("]", "").replace("'", "").replace("(", "").replace(")", "").strip(",")
    except sqlite3.ProgrammingError as pe:
        # Qualified exception: the original caught a bare `ProgrammingError`
        # that was never imported (only OperationalError was), which would
        # itself raise NameError instead of being handled.
        print(pe)
import sqlite3
import traceback
from time import sleep
import mysql.connector
def check_user(user_id, online_time=None):
    """Insert `user_id` into `online` unless a row for it already exists.

    Fixes two bugs in the original:
    * ``cur.execute()`` always returns None in mysql-connector, so the old
      ``result is None`` test was always true and every call inserted a new
      row -- the duplicate-rows symptom.  The row must be *fetched*.
    * ``online_time`` was referenced but never defined in this scope; it is
      now an explicit optional parameter (callers that passed nothing keep
      working).
    """
    conn = mysql.connector.connect(host='localhost', database='online',
                                   user='root1', password='rootRRR111_')
    try:
        cur = conn.cursor()
        cur.execute('CREATE TABLE IF NOT EXISTS online(id INT, last_online_date TEXT)')
        conn.commit()
        select = "SELECT * FROM online WHERE id = %s LIMIT 0, 1"
        cur.execute(select, (user_id,))
        result = cur.fetchone()  # actually read the row back
        if result is None:
            insert = ('INSERT INTO online (id, last_online_date) VALUES (%s, %s)')
            cur.execute(insert, (user_id, online_time))
            conn.commit()
    finally:
        conn.close()
def update_online_status(user_id, online_time):
    """Append `online_time` to the user's stored last_online_date history.

    Fixes the original's use of ``result = cursor.execute(...)``:
    mysql-connector's execute() returns None, so ``old_online`` was always
    None and the "unread result found" error followed.  The row is fetched
    explicitly instead.
    """
    conn = mysql.connector.connect(host='localhost', database='online',
                                   user='root1', password='rootRRR111_')
    try:
        cursor = conn.cursor()
        select = 'SELECT last_online_date FROM online WHERE id = %s'
        cursor.execute(select, (user_id,))
        row = cursor.fetchone()
        old_online = row[0] if row else None
        online_time = f'{old_online},{online_time}'
        cursor.execute('UPDATE online SET last_online_date = %s WHERE id = %s',
                       (online_time, user_id))
        conn.commit()
    finally:
        conn.close()
# Polling loop: each pass re-reads ids.ini and records every user's status.
# NOTE(review): `Client` is presumably pyrogram's Client; its import, plus
# datetime/tzlocal, are not shown in this snippet -- confirm at file top.
app = Client("my_account")
app.start()
while True:
    try:
        with open('ids.ini', 'r') as file:
            users = file.read().splitlines()
        for user in users:
            result = app.get_users(user)
            user_id = result['id']
            if result['status'] == 'offline':
                # Convert the stored unix timestamp to local wall-clock time.
                unix_timestamp = float(result['last_online_date'])
                local_timezone = tzlocal.get_localzone()
                local_time = datetime.fromtimestamp(unix_timestamp, local_timezone)
                online_time = local_time.strftime("%Y/%m/%d %H:%M:%S")
            elif result['status'] == 'online':
                now = datetime.now()
                online_time = now.strftime("%Y/%m/%d %H:%M:%S")
            # NOTE(review): if status is neither value, online_time keeps the
            # previous iteration's value (or is undefined on the first pass).
            check_user(user_id)
            update_online_status(user_id, online_time)
            # sleep(300)
    except Exception:
        # Best-effort loop: log the failure and keep polling.
        traceback.print_exc()
        continue
app.stop()
I am writing a program that would read the online status of a user in telegram.
Instead of writing online to an existing user, a huge number of identical rows appear in the database.
Example:
Table with repetitions
When I try to fix something, there are a lot of errors.
mysql.connector.errors.programmingerror: not all parameters were used in the sql statement
mysql.connector.errors.internalerror: unread result found
and other...
Pls help!!
I can successfully connect to SQL Server from my jupyter notebook with this script:
from sqlalchemy import create_engine
import pyodbc
import csv
import time
import urllib
# Build an ODBC connection string and URL-encode it for SQLAlchemy's
# mssql+pyodbc dialect.  TRUSTED_CONNECTION=YES uses Windows integrated
# auth, which is why no user/password appears here.
params = urllib.parse.quote_plus('''DRIVER={SQL Server Native Client 11.0};
SERVER=SV;
DATABASE=DB;
TRUSTED_CONNECTION=YES;''')
engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)
And I can successfully execute SQL stored procedures without parameters from jupyter notebook with the following function :
def execute_stored_procedure(engine, procedure_name):
    """Execute a parameterless stored procedure.

    Returns {'status': 'OK'} on success, or {'status': 'ERROR',
    'error': <exception>} on failure; the raw connection is always closed.
    """
    outcome = {}
    connection = engine.raw_connection()
    try:
        cursor = connection.cursor()
        cursor.execute("EXEC " + procedure_name)
        cursor.close()
        connection.commit()
        outcome['status'] = 'OK'
    except Exception as exc:
        outcome['status'] = 'ERROR'
        outcome['error'] = exc
    finally:
        connection.close()
    return outcome
How could I adapt this function for stored procedures that take several parameters (two, in my case)?
Solution of my problem, working only for stored procedures with 0 or 2 parameters (just edit the 10th line if you want another number of parameters):
def execute_stored_procedure(engine, procedure_name, params_dict=None):
    """Execute a stored procedure with any number of named parameters.

    Improvements over the original:
    * values go through DB-API ``?`` placeholders instead of being
      string-formatted into the SQL, so they are no longer injectable;
    * parameter names use the SQL Server ``@name`` prefix (the original's
      ``#`` denotes a temp table, not a parameter);
    * works for 0..N parameters, not just 0 or 2.

    Returns {'status': 'OK'} or {'status': 'ERROR', 'error': <exception>}.
    """
    res = {}
    connection = engine.raw_connection()
    try:
        cursor = connection.cursor()
        if not params_dict:
            cursor.execute("EXEC " + procedure_name)
        else:
            placeholders = ", ".join("@" + str(k) + "=?" for k in params_dict)
            cursor.execute("EXEC " + procedure_name + " " + placeholders,
                           list(params_dict.values()))
        cursor.close()
        connection.commit()
        res['status'] = 'OK'
    except Exception as e:
        res['status'] = 'ERROR'
        res['error'] = e
    finally:
        connection.close()
    return res
I have a database named products in SQL and I wish to get all the rows as a dictionary or JSON. I've seen an example here, but how do I pass the username, password and host?
This is the example:
import json
import psycopg2
def db(database_name='products'):
    # Connects with only a database name: host/user/password fall back to
    # libpq defaults, which is why this raises
    # "fe_sendauth: no password supplied" when the server requires auth.
    return psycopg2.connect(database=database_name)
def query_db(query, args=(), one=False):
    """Execute `query` and return rows as {column_name: value} dicts.

    With one=True, return just the first dict (or None if no rows).
    """
    cur = db().cursor()
    cur.execute(query, args)
    column_names = [desc[0] for desc in cur.description]
    rows = [dict(zip(column_names, record)) for record in cur.fetchall()]
    cur.connection.close()
    if one:
        return rows[0] if rows else None
    return rows
# Module-level demo: fetch the row with id 1 and dump it as JSON.
# NOTE(review): json.dumps will fail on non-serialisable column types
# (dates, Decimal) without a default= handler -- confirm the schema.
my_query = query_db("SELECT * FROM main_prod WHERE id = 1")
print(my_query)
json_output = json.dumps(my_query)
print(json_output)
When i use it like this i'm getting this error:
File "/home/alex/Documents/Proiecte/Python/bapp/venv/lib/python3.5/site-packages/psycopg2/__init__.py", line 130, in connect
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
psycopg2.OperationalError: fe_sendauth: no password supplied
When i'm doing like this
import json
import psycopg2
def db(database_name='products', password='...', host='123.123.123.13', user='alex'):
    # Same helper as before, now with explicit credentials.
    # NOTE(review): hard-coded credentials -- move to env vars/config.
    return psycopg2.connect(database=database_name, password=password, host=host, user=user)


def query_db(query, args=(), one=False):
    # Run the query and turn each row into a {column: value} dict.
    cur = db().cursor()
    cur.execute(query, args)
    r = [dict((cur.description[i][0], value) for i, value in enumerate(row)) for row in cur.fetchall()]
    cur.connection.close()
    return (r[0] if r else None) if one else r


# NOTE(review): the reported "hangs forever" symptom occurs before any query
# runs -- most likely the TCP connection to the remote host is filtered, so
# psycopg2.connect blocks on the socket; try a connect_timeout.
my_query = query_db("SELECT * FROM main_prod WHERE id = 1")
print(my_query)
json_output = json.dumps(my_query)
print(json_output)
It won't print anything; it just hangs, as if the process were asleep.
How can i do it?
Try this:
import psycopg2
import json
def main():
    """Connect to Postgres, fetch the row with id 1 and print it as JSON.

    Fixes in this version:
    * the DSN is a libpq connection string -- space-separated key=value
      pairs with the keyword ``dbname`` (the original comma-separated
      "database_name=..." string is not a valid DSN);
    * Python 3 ``print()`` calls (the original mixed a Python 2 print
      statement with py3 calls);
    * rows come back as plain tuples, so the id is read by index
      (``row.id`` attribute access would raise AttributeError).
    """
    conn_string = "dbname='products' password='...' host='123.123.123.13' user='alex'"
    # print the connection string we will use to connect
    print("Connecting to database\n ->%s" % (conn_string))
    # get a connection; if a connect cannot be made an exception is raised here
    conn = psycopg2.connect(conn_string)
    # conn.cursor returns a cursor object used to perform queries
    cursor = conn.cursor()
    # execute our Query
    cursor.execute("SELECT * FROM main_prod WHERE id = 1")
    # retrieve the records from the database
    records = cursor.fetchall()
    objects = [
        {
            'id': row[0],  # first column of the tuple; tuples have no .id
        } for row in records
    ]  # there you tell what data you want to return
    json_output = json.dumps(objects)
    print(json_output)


if __name__ == "__main__":
    main()
I am a bit of a newbie and I'm stuck with this postgres insert step.
My challenge is I am pulling a Dict from a json that is stored in a list and I am trying to pull the values from the dict and save it to a postgres DB.
any help on how to write this up correctly would be appreciated
Here is the connection string for the DB under the page break line is the code used for the db insert.
import psycopg2
'''DATABASE CONNECTION SETTINGS'''
def dbconnect():
    """Return a new psycopg2 connection using the hard-coded settings."""
    dsn = ("dbname='databse' user='username' "
           "host='dbhost' password='password'")
    return psycopg2.connect(dsn)
def weatherupdate(dbauth, list):
    """Insert one weather observation into the `weather` table.

    `list[0]` holds the header fields and `list[1]` the observation fields
    (both dicts parsed from the JSON feed).

    The original embedded the Python variable *names* (l01, l02, ...) in the
    SQL text, which Postgres parsed as column references -- hence
    'column "l01" does not exist'.  The fix is %s placeholders with the
    values passed to execute().  The no-op `try: connection / except` probe
    and the Python 2 print statement were removed.
    """
    conn = dbauth
    cursor = conn.cursor()

    header = list[0]
    obs = list[1]
    # Values in the same order as the column list below.
    values = (
        header['state_time_zone'],
        header['time_zone'],
        header['product_name'],
        header['state'],
        header['refresh_message'],
        header['name'],
        obs['swell_period'],
        obs['lat'],
        obs['lon'],
        obs['cloud_oktas'],
        obs['gust_kt'],
        obs['history_product'],
        obs['local_date_time'],
        obs['cloud'],
        obs['cloud_type'],
        obs['swell_height'],
        obs['wmo'],
        obs['wind_dir'],
        obs['weather'],
        obs['wind_spd_kt'],
        obs['rain_trace'],
        obs['aifstime_utc'],
        obs['press_tend'],
        obs['press'],
        obs['vis_km'],
        obs['sea_state'],
        obs['air_temp'],
        obs['cloud_base_m'],
        obs['cloud_type_id'],
        obs['swell_dir_worded'],
        obs['sort_order'],
    )

    query = ("INSERT INTO weather (state_time_zone, time_zone, product_name, "
             "state, refresh_message, name, swell_period, lat, lon, "
             "cloud_oktas, gust_kt, history_product, local_date_time, cloud, "
             "cloud_type, swell_height, wmo, wind_dir, weather, wind_spd_kt, "
             "rain_trace, aifstime_utc, press_tend, press, vis_km, sea_state, "
             "air_temp, cloud_base_m, cloud_type_id, swell_dir_worded, "
             "sort_order) VALUES (" + ", ".join(["%s"] * len(values)) + ");")
    cursor.execute(query, values)
    conn.commit()
# Entry point: connect and insert the latest observation.
# NOTE(review): getweather() is defined elsewhere in the original script.
weatherupdate(dbconnect(), getweather())
When i run the code it throws this error:
Traceback (most recent call last):
File "weatherDb.py", line 57, in <module>
weatherupdate(dbconnect(), getweather())
File "weatherDb.py", line 53, in weatherupdate
cursor.execute(query)
psycopg2.ProgrammingError: column "l01" does not exist
LINE 1: ...d_type_id, swell_dir_worded, sort_order ) VALUES (l01, l02, ...
I'm sure this is incorrect, so any help and direction would be great.
Thanks in advance.
query = """INSERT INTO weather (state_time_zone, time_zone, product_name, [SNIP])
VALUES (%s, %s, %s, [SNIP]) """
cursor.execute(query, (l01, l02, l03, [SNIP]))