I'm new to Python and I need some help with one loop. Here is the code:
def show_result(self):
    listed = self.listed
    res = {}
    for key in listed:
        query_result = 0
        is_listed = 0
        rbl = key
        if not listed[key].get('ERROR'):
            query_result = "success"
            if listed[key]['LISTED']:
                is_listed = 1
            else:
                is_listed = 0
        else:
            query_result = "error"
        res[key] = [['rbl', rbl], ['host', listed['SEARCH_HOST']], ['lookup', query_result], ['is_listed', is_listed]]
    return res
try:
    con = db.connect(db_server, db_user, db_password, db_name)
    cur = con.cursor()
    # hosts = json array with hosts and IPs
    for host in hosts:
        searcher = checkHost(host, rbls, report)
        result = searcher.show_result()
        #
        # Need to loop through result to get rbl, host, lookup and is_listed variables
        # to be able to execute the query and commit it later when loop is finished
        #
        cur.execute("INSERT INTO checks_reports_df8 (`rbl`, `host`, `lookup`, `is_listed`) VALUES(%s, %s, '%s')", (rbl, host, lookup, is_listed))
    con.commit()
except db.Error, e:
    if con:
        con.rollback()
finally:
    if con:
        con.close()
If I pprint the result from searcher.show_result(), here is what I get:
{u'0spam-killlist.fusionzero.com': [['rbl', u'0spam-killlist.fusionzero.com'],
['host', u'127.0.0.2'],
['lookup', 'success'],
['is_listed', 0]],
u'zen.spamhaus.org': [['rbl', u'zen.spamhaus.org'],
['host', u'127.0.0.2'],
['lookup', 'success'],
['is_listed', 1]]}
My problem is that I don't know how to loop through the result from searcher.show_result(). If my approach is wrong, the result returned from searcher.show_result() can be changed.
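A minimal sketch of one way to unpack that structure inside the for host loop, assuming show_result() keeps the [name, value] pair layout shown above:

for rbl_key, fields in result.items():
    row = dict(fields)  # each value is a list of [name, value] pairs; turn it into a dict
    cur.execute(
        "INSERT INTO checks_reports_df8 (`rbl`, `host`, `lookup`, `is_listed`) VALUES (%s, %s, %s, %s)",
        (row['rbl'], row['host'], row['lookup'], row['is_listed'])
    )
con.commit()  # commit once, after the loop over hosts is finished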
import sqlite3
import traceback
from time import sleep
from datetime import datetime

import tzlocal
import mysql.connector
# Client is not imported in the original snippet; it appears to be a Telegram client class (e.g. Pyrogram's Client)
from pyrogram import Client
def check_user(user_id):
    conn = mysql.connector.connect(host='localhost', database='online', user='root1', password='rootRRR111_')
    cur = conn.cursor()
    cur.execute('CREATE TABLE IF NOT EXISTS online(id INT, last_online_date TEXT)')
    conn.commit()
    select = "SELECT * FROM online WHERE id = %s LIMIT 0, 1"
    result = cur.execute(select, (user_id,))
    if result is None:
        insert = ('INSERT INTO online (id, last_online_date) VALUES (%s, %s)')
        cur.reset()
        cur.execute(insert, (user_id, online_time))
        conn.commit()
def update_online_status(user_id, online_time):
    conn = mysql.connector.connect(host='localhost', database='online', user='root1', password='rootRRR111_')
    cursor = conn.cursor()
    select = 'SELECT last_online_date FROM online WHERE id = %s'
    result = cursor.execute(select, (user_id,))
    old_online = result
    online_time = f'{old_online},{online_time}'
    cursor.reset()
    cursor.execute('UPDATE online SET last_online_date = %s WHERE id = %s', (online_time, user_id))
    conn.commit()
app = Client("my_account")
app.start()

while True:
    try:
        with open('ids.ini', 'r') as file:
            users = file.read().splitlines()
        for user in users:
            result = app.get_users(user)
            user_id = result['id']
            if result['status'] == 'offline':
                unix_timestamp = float(result['last_online_date'])
                local_timezone = tzlocal.get_localzone()
                local_time = datetime.fromtimestamp(unix_timestamp, local_timezone)
                online_time = local_time.strftime("%Y/%m/%d %H:%M:%S")
            elif result['status'] == 'online':
                now = datetime.now()
                online_time = now.strftime("%Y/%m/%d %H:%M:%S")
            check_user(user_id)
            update_online_status(user_id, online_time)
        # sleep(300)
    except Exception:
        traceback.print_exc()
        continue

app.stop()
I am writing a program that reads the online status of a user in Telegram.
Instead of updating the row for an existing user, a huge number of identical rows appear in the database.
Example:
Table with repetitions
When I try to fix something, I get a lot of errors, e.g.:
mysql.connector.errors.ProgrammingError: Not all parameters were used in the SQL statement
mysql.connector.errors.InternalError: Unread result found
and others...
Please help!
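For what it's worth, in mysql.connector cursor.execute() always returns None; the SELECT result has to be fetched before it can be tested (and leaving it unread is what triggers "Unread result found"). A minimal sketch of check_user along those lines, with online_time passed in explicitly since it is not defined inside the original function:

def check_user(user_id, online_time):
    conn = mysql.connector.connect(host='localhost', database='online',
                                   user='root1', password='rootRRR111_')
    cur = conn.cursor()
    cur.execute("SELECT id FROM online WHERE id = %s LIMIT 1", (user_id,))
    row = cur.fetchone()  # consume the result so the cursor can be reused
    if row is None:  # no row for this user yet, so insert one
        cur.execute("INSERT INTO online (id, last_online_date) VALUES (%s, %s)",
                    (user_id, online_time))
        conn.commit()
    conn.close()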
I've already tried adding a comma after Name and a question mark in "VALUES", and I was getting a syntax error for my parentheses.
import sqlite3
from sqlite3 import Error
from flask import request, abort

@app.route("/Disease/new", methods=["POST"])
def addDisease():
    newDisease = {}
    conn = None
    try:
        jsonPostData = request.get_json()
        Name = jsonPostData["Name"]

        conn = sqlite3.connect("./dbs/ContactTracer.db")
        conn.row_factory = sqlite3.Row
        sql = """
            INSERT INTO Disease(Name) VALUES(?)
        """
        cursor = conn.cursor()
        cursor.execute(sql, (Name))
        conn.commit()

        sql = """
            SELECT Disease.ID, Disease.Name
            FROM Disease
            WHERE Disease.ID = ?
        """
        cursor.execute(sql, (cursor.lastrowid,))
        row = cursor.fetchone()
        newDisease["ID"] = row["ID"]
        newDisease["Name"] = row["Name"]
    except Error as e:
        print(f"Error opening the database: {e}")
        abort(500)
    finally:
        if conn:
            conn.close()
    return newDisease
Pass the parameter as a one-element tuple (note the trailing comma), and check whether the INSERT succeeded. sqlite3 expects the parameters to be a sequence, and a bare string is iterated character by character, which is why the version without the comma fails:
cursor.execute(sql, (Name,))
...
if cursor.lastrowid:
    cursor.execute(sql, (cursor.lastrowid,))
I have a database named products in SQL and I wish to get all the rows as a dictionary or JSON. I've seen an example here, but how do I pass the username, password and host?
This is the example:
import json
import psycopg2

def db(database_name='products'):
    return psycopg2.connect(database=database_name)

def query_db(query, args=(), one=False):
    cur = db().cursor()
    cur.execute(query, args)
    r = [dict((cur.description[i][0], value) for i, value in enumerate(row)) for row in cur.fetchall()]
    cur.connection.close()
    return (r[0] if r else None) if one else r

my_query = query_db("SELECT * FROM main_prod WHERE id = 1")
print(my_query)

json_output = json.dumps(my_query)
print(json_output)
When I use it like this, I'm getting this error:
File "/home/alex/Documents/Proiecte/Python/bapp/venv/lib/python3.5/site-packages/psycopg2/__init__.py", line 130, in connect
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
psycopg2.OperationalError: fe_sendauth: no password supplied
When I do it like this:
import json
import psycopg2

def db(database_name='products', password='...', host='123.123.123.13', user='alex'):
    return psycopg2.connect(database=database_name, password=password, host=host, user=user)

def query_db(query, args=(), one=False):
    cur = db().cursor()
    cur.execute(query, args)
    r = [dict((cur.description[i][0], value) for i, value in enumerate(row)) for row in cur.fetchall()]
    cur.connection.close()
    return (r[0] if r else None) if one else r

my_query = query_db("SELECT * FROM main_prod WHERE id = 1")
print(my_query)

json_output = json.dumps(my_query)
print(json_output)
It won't print anything; it just hangs as if it were sleeping.
How can I do it?
Try this:
import psycopg2
import json

def main():
    conn_string = "dbname='products' password='...' host='123.123.123.13' user='alex'"
    # print the connection string we will use to connect
    print("Connecting to database\n ->%s" % conn_string)

    # get a connection; if a connection cannot be made an exception will be raised here
    conn = psycopg2.connect(conn_string)

    # conn.cursor will return a cursor object, you can use this cursor to perform queries
    cursor = conn.cursor()

    # execute our query
    cursor.execute("SELECT * FROM main_prod WHERE id = 1")

    # retrieve the records from the database
    records = cursor.fetchall()

    objects = [
        {
            'id': row[0],  # rows are plain tuples, so index by column position
        } for row in records
    ]  # there you tell what data you want to return

    json_output = json.dumps(objects)
    print(json_output)

if __name__ == "__main__":
    main()
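As an aside, psycopg2.connect also accepts the same settings as keyword arguments, which avoids getting the DSN string format wrong (the credentials below are the placeholder values from the question):

import psycopg2

conn = psycopg2.connect(
    dbname='products',
    user='alex',
    password='...',
    host='123.123.123.13',
)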
I have a database class in Python which I use to query the database.
import logging
import cx_Oracle

class Database():
    def __init__(self, user, password, host, port, service_name, mode, *args):
        # mode should be 0 if not cx_Oracle.SYSDBA
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.service_name = service_name
        self.logger = logging.getLogger(__name__)
        self.mode = 0
        self.connection = None
        self.connect_string = self.user + '/' + self.password + '@' + dsn
        try:
            self.connection = cx_Oracle.connect(self.connect_string, mode=self.mode, threaded=True)
            self.connection.stmtcachesize = 1000
            self.connection.client_identifier = 'my_app_scheduler'
            self.cursor = self.connection.cursor()
            self.cursor.arraysize = 10000
            self.idVar = self.cursor.var(cx_Oracle.NUMBER)
        except cx_Oracle.DatabaseError, exc:
            error, = exc.args
            self.logger.exception('Exception occurred while trying to create database object : %s', error.message)
            raise exc

    def query(self, q):
        try:
            self.cursor.execute(q)
            return self.cursor.fetchall(), self.cursor.rowcount
        except cx_Oracle.DatabaseError, exc:
            raise exc
And this is the code to manipulate the fetched data and convert it.
output, rowcount = db_run_query.query(sql_text)
# self.logger.debug('output : %s, type : %s', output, type(output))
end_time = time.time()
time_taken = end_time - start_time
self.logger.debug('Rowcount : %s, time_taken : %s', rowcount, time_taken)
column_name = [d[0] for d in db_run_query.cursor.description]
result = [dict(zip(column_name, row)) for row in output]
# Convert everything to string, e.g. datetime
try:
    for each_dict in result:
        for key in each_dict:
            if isinstance(each_dict[key], cx_Oracle.LOB):
                self.logger.debug('%s', each_dict[key].size())
                each_dict[key] = each_dict[key].read()
                # self.logger.debug('%s %s %s %s %s %s %s', key, each_dict, type(key), type(each_dict[key]), type(each_dict), temp_each_dict, type(temp_each_dict))
            else:
                each_dict[key] = str(each_dict[key])
except Exception as e:
    self.logger.debug(e)
So without self.cursor.arraysize = 10000, for a query like select clob_value from table it was able to fetch the data and logged Rowcount : 4901, time_taken : 0.196296930313, but it gave me an error like
LOB variable no longer valid after subsequent fetch
When I set the arraysize parameter, the error goes away. (Is arraysize only relevant for LOB columns? It works fine for select other_column from table where rownum < 20000, where other_column is a VARCHAR.)
Why does that happen?
Turns out CLOBs and fetchall() don't play nice together:
Internally, Oracle uses LOB locators which are allocated based on the
cursor array size. Thus, it is important that the data in the LOB
object be manipulated before another internal fetch takes place. The
safest way to do this is to use the cursor as an iterator. In
particular, do not use the fetchall() method.
By avoiding cursor.fetchall() and using the cursor as an iterator (e.g. for row in cursor: ...), I was able to get around this problem.
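A minimal sketch of that iterator approach, reusing the db_run_query object from the question and reading each LOB while its row is still current:

db_run_query.cursor.execute(sql_text)
column_name = [d[0] for d in db_run_query.cursor.description]

result = []
for row in db_run_query.cursor:  # the cursor fetches rows lazily, one array-size batch at a time
    each_dict = dict(zip(column_name, row))
    for key in each_dict:
        if isinstance(each_dict[key], cx_Oracle.LOB):
            # read the LOB now, before the next fetch invalidates its locator
            each_dict[key] = each_dict[key].read()
        else:
            each_dict[key] = str(each_dict[key])
    result.append(each_dict)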
I'm using Python 2.7 and PostgreSQL 9.1.
Trying to get a dictionary from a query, I've tried the code described here:
http://wiki.postgresql.org/wiki/Using_psycopg2_with_PostgreSQL
import psycopg2
import psycopg2.extras
conn = psycopg2.connect("dbname=mydb host=localhost user=user password=password")
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute ("select * from port")
type(cur.fetchall())
It prints the following:
<type 'list'>
Printing the item itself shows me that it is a list.
The expected result was a dictionary.
Edit:
Trying the next:
ans = cur.fetchall()[0]
print ans
print type(ans)
returns
[288, 'T', 51, 1, 1, '192.168.39.188']
<type 'list'>
Thanks a lot Andrey Shokhin, the full answer is:
#!/var/bin/python
import psycopg2
import psycopg2.extras

conn = psycopg2.connect("dbname=uniart4_pr host=localhost user=user password=password")
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute("select * from port")
ans = cur.fetchall()
ans1 = []
for row in ans:
    ans1.append(dict(row))
print ans1  # actually it's returned in the real code
It's normal: when you call .fetchall(), the method returns a list of rows. But if you write
type(cur.fetchone())
it will return only one row, with type:
<class 'psycopg2.extras.DictRow'>
After this you can use it as a list or as a dictionary:
cur.execute('SELECT id, msg FROM table;')
rec = cur.fetchone()
print rec[0], rec['msg']
You can also use a simple cursor iterator:
res = [json.dumps(dict(record)) for record in cursor] # it calls .fetchone() in loop
Perhaps to optimize it further, we can have:
#!/var/bin/python
import psycopg2
import psycopg2.extras

def get_dict_resultset(sql):
    conn = psycopg2.connect("dbname=pem host=localhost user=postgres password=Drupal#1008")
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(sql)
    ans = cur.fetchall()
    dict_result = []
    for row in ans:
        dict_result.append(dict(row))
    return dict_result

sql = """select * from tablename"""
print(get_dict_resultset(sql))
If you don't want to use a psycopg2.extras.DictCursor you can create a list of dictionaries for the results using cursor.description:
# connect
connection = psycopg2.connect()
cursor = connection.cursor()

# query
cursor.execute("SELECT * FROM myTable")

# transform result
columns = list(cursor.description)
result = cursor.fetchall()

# make dict
results = []
for row in result:
    row_dict = {}
    for i, col in enumerate(columns):
        row_dict[col.name] = row[i]
    results.append(row_dict)

# display
print(results)
I use the following function fairly regularly:
def select_query_dict(connection, query, data=[]):
    """
    Run generic select query on db, returns a list of dictionaries
    """
    logger.debug('Running query: {}'.format(query))

    # Open a cursor to perform database operations
    cursor = connection.cursor()
    logging.debug('Db connection successful')

    # execute the query
    try:
        logger.info('Running query.')
        if len(data):
            cursor.execute(query, data)
        else:
            cursor.execute(query)
        columns = list(cursor.description)
        result = cursor.fetchall()
        logging.debug('Query executed successfully')
    except (Exception, psycopg2.DatabaseError) as e:
        logging.error(e)
        cursor.close()
        exit(1)

    cursor.close()

    # make dict
    results = []
    for row in result:
        row_dict = {}
        for i, col in enumerate(columns):
            row_dict[col.name] = row[i]
        results.append(row_dict)

    return results
In addition to returning just the query results as a list of dictionaries, I would suggest returning key-value pairs (column-name: row-value). Here is my suggestion:
import psycopg2
import psycopg2.extras

conn = None
try:
    conn = psycopg2.connect("dbname=uniart4_pr host=localhost user=user password=password")
    with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
        cursor.execute("SELECT * FROM table")
        column_names = [desc[0] for desc in cursor.description]
        res = cursor.fetchall()
        cursor.close()
        result = list(map(lambda x: dict(zip(column_names, x)), res))
except (Exception, psycopg2.DatabaseError) as e:
    logger.error(e)
finally:
    if conn is not None:
        conn.close()
There is a built-in solution to get your result as a collection of dictionaries:
from psycopg2.extras import RealDictCursor
cur = conn.cursor(cursor_factory=RealDictCursor)
Modified from: https://www.peterbe.com/plog/from-postgres-to-json-strings, copyright 2013 Peter Bengtsson
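A quick usage sketch with that cursor factory, reusing the placeholder connection settings and the port table from the question; rows come back as real dictionaries keyed by column name:

from psycopg2.extras import RealDictCursor
import psycopg2
import json

conn = psycopg2.connect("dbname=mydb host=localhost user=user password=password")
cur = conn.cursor(cursor_factory=RealDictCursor)
cur.execute("select * from port")
rows = cur.fetchall()  # list of RealDictRow (a dict subclass)
print(json.dumps(rows, default=str))  # default=str handles dates and other non-JSON types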
For me, converting the row to a dictionary failed (with the solutions mentioned by others) and I also could not use a cursor factory.
I am using PostgreSQL 9.6.10. The code below worked for me, but I am not sure if it is the right way to do it.
def convert_to_dict(columns, results):
    """
    This method converts the result set from postgres to a dictionary.
    It iterates the data, maps the columns to the values in the result set and converts them to dictionaries.
    :param columns: list - column names returned when the query is executed
    :param results: list / tuple - result set from when the query is executed
    :return: list of dictionaries mapped with the table column names and their values
    """
    allResults = []
    columns = [col.name for col in columns]
    if type(results) is list:
        for value in results:
            allResults.append(dict(zip(columns, value)))
        return allResults
    elif type(results) is tuple:
        allResults.append(dict(zip(columns, results)))
        return allResults
Way to use it:
conn = psycopg2.connect("dbname=pem host=localhost user=postgres,password=Drupal#1008")
cur = conn.cursor()
cur.execute("select * from tableNAme")
resultset = cursor.fetchall()
result = convert_to_dict(cursor.description, resultset)
print(result)
resultset = cursor.fetchone()
result = convert_to_dict(cursor.description, resultset)
print(result)
Contents of './config.py'
#!/usr/bin/python
PGCONF = {
    "user": "postgres",
    "password": "postgres",
    "host": "localhost",
    "database": "database_name"
}
Contents of './main.py'
#!/usr/bin/python
from config import PGCONF
import psycopg2
import psycopg2.extras

# open connection
conn = psycopg2.connect(**PGCONF)
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

# declare lambda function
fetch_all_as_dict = lambda cursor: [dict(row) for row in cursor]

# execute any query of your choice
cur.execute("""select * from table_name limit 1""")

# get all rows as list of dicts
print(fetch_all_as_dict(cur))

# close cursor and connection
cur.close()
conn.close()