"KeyError: 0" when reading a fetched row - python

I retrieve the data from the database and return it from this method:
import MySQLdb as mdb  # assumed imports for the `mdb` alias used below
import MySQLdb.cursors

def _db_execute(s, query, data=None, return_data=False):
    con = mdb.connect(s.sql_detail['host'], s.sql_detail['user'], s.sql_detail['pass'],
                      s.sql_detail['db'], cursorclass=mdb.cursors.DictCursor)
    with con:
        cur = con.cursor()
        if type(data) in [list, tuple] and len(data) != 0:
            if type(data[0]) in [list, tuple]:  # fixed: test the first item's type, not the item itself
                cur.executemany(query, (item for item in data))
            else:
                cur.execute(query, data)
        elif data is not None:
            cur.execute(query, [data])
        else:
            cur.execute(query)
        if return_data:
            data = cur.fetchall()
            if len(data) == 0:
                data = None
            return data
The following method retrieves the data:
def retrieve_portal_credetials(self):
    if self.valid_user():
        query2 = '''SELECT `group`, `username`, `password`
                    FROM db.users u, db.groups g, db.user_groups i
                    WHERE u.userid = i.user_id AND g.group_id = i.groupd_id AND u.username = %s'''
        portal_data = self._db_execute(query=query2, data=self.username, return_data=True)
        return portal_data
I try to assign the data to variables here:
rows = auth_usr.retrieve_portal_credetials()
# list_of_portaldata = list(portal_data)
# s.data.update({'groups_list': [val]})
# val = list_of_portaldata
for row in rows:
    portal_group = row[0]
    portal_username = row[1]
    portal_password = row[2]
When I debug through the code, I found it breaks at portal_group = row[0] with the error KeyError: 0.
What I understood was that row doesn't have a 0 key, which is why I get this error. However, the debugger shows 3 variables under row. Can anybody give a hint on how to fix this?

After inspecting the data structure shown in the debugger, I used the following syntax to access the data:
portal_group = row['group']
portal_username = row['username']
portal_password = row['password']
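For context on why this works: the connection in _db_execute is created with cursorclass=mdb.cursors.DictCursor, so every fetched row is a dict keyed by column name instead of a tuple indexed by position, which is exactly why row[0] raises KeyError: 0. A minimal sketch of the difference (connection parameters are placeholders):

import MySQLdb as mdb
import MySQLdb.cursors

con = mdb.connect('localhost', 'user', 'pass', 'db')

# Default cursor: rows are tuples, indexed by position.
cur = con.cursor()
cur.execute("SELECT 1 AS a, 2 AS b")
row = cur.fetchone()
print(row[0])        # 1 -- positional access works

# DictCursor: rows are dicts, keyed by column name.
dcur = con.cursor(mdb.cursors.DictCursor)
dcur.execute("SELECT 1 AS a, 2 AS b")
drow = dcur.fetchone()
print(drow['a'])     # 1 -- drow[0] would raise KeyError: 0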

Related

Performance issue - code written to read data from xlsx and store it in database

I am trying to insert data into my database. I thought doing a bulk insert would help, but it only reduced my execution time by about a minute; for 12k records my program still takes ~13 minutes to insert the data.
Is there anything I can change in my code, or anything else I can use, to improve its performance?
Reading from the Issuer and Service Area tables is a simple select id from table query.
I guess most of the time is spent generating my MedicalPlan_dict. Can I change something here?
def medical_plan_upload(self):
    medicalplanread = ReadExcel()
    getmodeldata = GetModelData()
    # read plans excel
    df = medicalplanread.readplanexcel()
    MedicalPlan_dict = []
    conn = None  # so the finally block is safe if we fail before connecting
    try:
        headers = getmodeldata.get_medical_plans_headers()
        dataframe = df[df.columns.intersection(headers)]
        commonColumns = dataframe.columns.tolist()
        # iterate through the dataframe - get value for issuer table and insert/update in medical plan table
        for index, frame in df.iterrows():
            temp_dict = {}
            # reading from Issuer table
            issuerid = getmodeldata.get_issuer(frame['wellthie_issuer_identifier'])
            # reading from Service_area table
            service_area_id = getmodeldata.get_serviceArea(frame['service_area_identifier'])
            if service_area_id is not None and issuerid is not None:
                # reading from medical plans table
                medical_plan = getmodeldata.get_medicalplans(issuerid, frame['hios_plan_identifier'],
                                                             frame['plan_year'],
                                                             frame['group_or_individual_plan_type'])
                if not medical_plan:  # check if medical_plan is []
                    medical_plans_data = frame.to_dict()
                    for key, value in medical_plans_data.iteritems():
                        if key in commonColumns:
                            if pd.isna(value):
                                value = None
                                temp_dict[key] = value
                            else:
                                temp_dict[key] = value
                    temp_dict["issuer_id"] = issuerid
                    temp_dict["service_area_id"] = service_area_id
                    MedicalPlan_dict.append(temp_dict)
                    # new_plan = MedicalPlan(**temp_dict)
                    # sess.add(new_plan)
                    # sess.commit()
                else:
                    print ("row number..", index, " is already in the db...")
                    continue
            else:
                print ("row number.. ", index, "is invalid")
        engine = create_engine('postgresql+psycopg2://postgres:postgres@localhost/test_biz_dev')  # fixed: '@', not '#'
        conn = engine.connect()
        conn.execute(MedicalPlan.__table__.insert(), MedicalPlan_dict)
    except Exception as e:
        print e
    finally:
        if conn is not None:
            conn.close()
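The per-row calls to get_issuer, get_serviceArea and get_medicalplans each hit the database, so 12k rows means roughly 36k round trips; that, rather than the final insert, is the likely bottleneck. One way to attack it is to load each lookup table into a dict once and resolve rows in memory. Below is a rough sketch of the idea; the lookup table and column names (issuers, service_areas, and their identifier columns) are assumptions, since the real schema is not shown.

from sqlalchemy import create_engine
import pandas as pd

engine = create_engine('postgresql+psycopg2://postgres:postgres@localhost/test_biz_dev')
conn = engine.connect()

# One query per lookup table instead of one query per spreadsheet row.
# Table/column names below are assumptions, not the real schema.
issuer_ids = dict(conn.execute(
    "SELECT wellthie_issuer_identifier, id FROM issuers").fetchall())
service_area_ids = dict(conn.execute(
    "SELECT service_area_identifier, id FROM service_areas").fetchall())

rows = []
for index, frame in df.iterrows():
    issuerid = issuer_ids.get(frame['wellthie_issuer_identifier'])
    service_area_id = service_area_ids.get(frame['service_area_identifier'])
    if issuerid is None or service_area_id is None:
        print ("row number.. ", index, "is invalid")
        continue
    temp_dict = {k: (None if pd.isna(v) else v)
                 for k, v in frame.to_dict().items() if k in commonColumns}
    temp_dict["issuer_id"] = issuerid
    temp_dict["service_area_id"] = service_area_id
    rows.append(temp_dict)

# The duplicate check (get_medicalplans) can be pre-loaded the same way, e.g.
# into a set of (issuer_id, hios_plan_identifier, plan_year, plan_type) keys.
conn.execute(MedicalPlan.__table__.insert(), rows)  # one bulk insert at the end
conn.close()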

python to write data into table error

I wrote a Python program to create a MySQL table and insert data into it. The program is as follows:
import re       # assumed imports
import pymysql

def pre_data_db_manage(type, data):
    conn = pymysql.connect(host="localhost", port=3306, user="root", passwd="********",
                           db="facebook_info", charset="utf8")
    cur = conn.cursor()
    if type == "pre_davi_group_members_data":
        is_exist_table_sql = "SHOW TABLES LIKE 'fb_pre_davi_group_members_posts'"
        if cur.execute(is_exist_table_sql) == 0:
            create_table_sql = '''CREATE TABLE fb_pre_davi_group_members_posts (
                id bigint not null primary key auto_increment,
                userID bigint, userName varchar(128), userURL varchar(256),
                postTime varchar(128), postText text, postTextLength int,
                likesCount int, sharesCount int, commentsCount int,
                postTextPolarity varchar(64), postTextSubjectivity varchar(64))'''
            cur.execute(create_table_sql)
    r = re.compile(r'^[a-zA-Z0-9]')
    for item in data:
        if "'" in item["PostText"]:
            item["PostText"] = item["PostText"].replace("'", " ")
        if "\\" in item["PostText"]:
            item["PostText"] = item["PostText"].replace("\\", "\\\\")
        for i in item["PostText"]:
            result = r.match(i)
            if result == None:
                print("in re")
                item['PostText'] = item['PostText'].replace(i, ' ')
        if "nan" in item["SharesCount"]:
            item["SharesCount"] = 0
        if "nan" in item["LikesCount"]:
            item["LikesCount"] = 0
        if "nan" in item["CommentsCount"]:
            item["CommentsCount"] = 0
        if "nan" in item["PostTextLength"]:
            item["PostTextLength"] = 0
        item["PostTextLength"] = int(item["PostTextLength"])
        item["LikesCount"] = int(item["LikesCount"])
        item["SharesCount"] = int(item["SharesCount"])
        item["CommentsCount"] = int(item["CommentsCount"])
        if type == "pre_davi_group_members_data":
            insert_sql = '''INSERT INTO fb_pre_davi_group_members_posts (userID, userName, userURL,
                postTime, postText, postTextLength, likesCount, sharesCount, commentsCount,
                postTextPolarity, postTextSubjectivity) VALUES
                ({0},"{1}",'{2}','{3}','{4}',{5},{6},{7},{8},{9},{10})'''.format(
                item["UserID"], item["UserName"], item["UserURL"], item["PostTime"],
                item["PostText"], item["PostTextLength"], item["LikesCount"],
                item["SharesCount"], item["CommentsCount"],
                item["PostTextPolarity"], item["PostTextSubjectivity"])
            print(insert_sql)
            try:
                cur.execute(insert_sql)
            except Exception as e:
                print("insert error")
                continue
    cur.close()
    conn.commit()
    conn.close()
and the call statement is as follows:
type = "pre_davi_group_members_data"
pre_data_db_manage(type, df_list)
However, when I execute this program, I find that no data has been inserted into the table fb_pre_davi_group_members_posts. In the MySQL command line, I run:
select count(*) from fb_pre_davi_group_members_posts;
and the result is 0.
Could you please tell me the reason and how to solve it?
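One thing worth checking first: the except block prints only "insert error" and discards the exception, so any failure (an unescaped quote or % surviving into the formatted SQL, a bad numeric field, etc.) is silent and the row is simply skipped. Printing e shows the real cause. Beyond that, building SQL with str.format is fragile; a parameterized insert lets pymysql do all the quoting and escaping itself. A minimal sketch of that approach, using the same table and columns as above:

import pymysql

conn = pymysql.connect(host="localhost", port=3306, user="root",
                       passwd="********", db="facebook_info", charset="utf8")
cur = conn.cursor()

insert_sql = '''INSERT INTO fb_pre_davi_group_members_posts
    (userID, userName, userURL, postTime, postText, postTextLength,
     likesCount, sharesCount, commentsCount, postTextPolarity, postTextSubjectivity)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'''

for item in df_list:   # df_list as in the call statement above
    try:
        # pymysql quotes and escapes every value itself; no manual
        # replace() calls or .format() string building needed.
        cur.execute(insert_sql, (
            item["UserID"], item["UserName"], item["UserURL"], item["PostTime"],
            item["PostText"], item["PostTextLength"], item["LikesCount"],
            item["SharesCount"], item["CommentsCount"],
            item["PostTextPolarity"], item["PostTextSubjectivity"]))
    except Exception as e:
        print("insert error: %s" % e)   # keep the real error visible

conn.commit()   # commit before closing, or nothing is persisted
cur.close()
conn.close()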

error to pass value in query Django

Hi everyone, I have a problem with this query in Django:
projects_name = str(kwargs['project_name']).split(',')
status = str(kwargs['status'])
list_project = tuple(projects_name)
opc_status = {'jobs_running': 'running', 'jobs_pending': 'pending', 'cpus_used': 'cpu'}
if status in opc_status.values():
    key = list(opc_status.keys())[list(opc_status.values()).index(status)] + ', entry_dt'
else:
    key = '*'
db = MySQLdb.connect(host='..', port=, user='..', passwd='..', db='..')
try:
    cursor = db.cursor()
    cursor.execute('SELECT %s FROM proj_cpus WHERE project in %s', [key, list_project])
The first parameter of the query must be * or something like jobs_pending, entry_dt, but the query returns this error:
tuple index out of range
Any idea how to create the query correctly?
You could try this:
# Build a comma-separated string of all items in list_project
data_list = ', '.join([item for item in list_project])
query = 'SELECT %s FROM proj_cpus WHERE project in (%s)'
# Supply the parameters in the form of a tuple
cursor.execute(query, (key, data_list))
cursor.fetchall() will always return data in tuples, as you have observed in the comments; it is not because there is an issue with the query. To convert to JSON you could do something like the following (row_counter is just a placeholder to make sure there is a unique key for every entry).
import json

key = '*'
data_list = ', '.join([item for item in list_project])
query = 'SELECT %s FROM proj_cpus WHERE project in (%s)'
cursor.execute(query, (key, data_list))
all_rows = cursor.fetchall()
row_headings = [header[0] for header in cursor.description]

row_counter = 0
all_rows_container = {}
for item in all_rows:
    item_dict = {row_headings[x]: item[x] for x in range(len(row_headings))}
    all_rows_container[row_counter] = item_dict
    row_counter += 1

json_data = json.dumps(all_rows_container)
print json_data
NOTE: the above may throw IndexError if the query is not with key = '*' because I think row_headings will contain all of the schema for the table, even for values that you did not select in the query. However, it should be sufficient to demonstrate the approach and you can tailor it in the event that you pick specific columns only.
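A caveat on the approach above: DB-API placeholders substitute values, not identifiers, so passing a column name or a comma-joined project string through %s makes the driver quote it as one string literal rather than expanding it. A common pattern is to whitelist the column part and generate one placeholder per item of the IN clause. A sketch of that, reusing key and list_project from above:

# Column names cannot be parameterized; validate `key` against a whitelist
# before interpolating it into the SQL text.
allowed = {'*', 'jobs_running, entry_dt', 'jobs_pending, entry_dt', 'cpus_used, entry_dt'}
if key not in allowed:
    raise ValueError('unexpected column selection: %r' % (key,))

# One %s placeholder per project, so each value is escaped individually.
placeholders = ', '.join(['%s'] * len(list_project))
query = 'SELECT %s FROM proj_cpus WHERE project IN (%s)' % (key, placeholders)
cursor.execute(query, list_project)
rows = cursor.fetchall()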

Python MySQLdb Cursor returns empty results

Here is my code snippet.
First I call the __execute_query function to insert some rows, and then to select a few rows.
However, when it executes a select query the function returns None or empty rows, even though running the same SQL query manually returns a few rows.
def __execute_query(self, query, parameters=[], set_row_id=False):
    conn = MySQL.get_conn()
    cursor = conn.cursor()
    cursor.execute(query, parameters)
    result = None
    query_type = query[0:query.find(" ")]
    query_type = query_type.lower()
    if query_type in ('insert', 'update', 'delete'):
        result = cursor.rowcount > 0
        if result:
            conn.commit()
            if query_type == 'insert' and set_row_id == True:
                """
                Update the object with the newly saved row ID and mark
                the object as not new.
                """
                self.__new_row = False
                self.__row_id = conn.insert_id()
                if self.__row_id == 0 or self.__row_id == "0":
                    self.__row_id = cursor.lastrowid
    else:
        result = cursor.fetchall()
    cursor.close()
    return result
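Without seeing MySQL.get_conn(), one likely cause: if inserts and selects run on different connections, a select cannot see rows another connection has not committed yet, and under MySQL's default REPEATABLE READ isolation an already-open transaction keeps reading its original snapshot even after the other connection commits. A small sketch of the effect (table t and the connection details are placeholders):

import MySQLdb

# Two separate connections, as a connection pool might hand out.
a = MySQLdb.connect('localhost', 'user', 'pass', 'db')
b = MySQLdb.connect('localhost', 'user', 'pass', 'db')

cur_a = a.cursor()
cur_a.execute("INSERT INTO t (x) VALUES (1)")

cur_b = b.cursor()
cur_b.execute("SELECT * FROM t WHERE x = 1")
print(cur_b.fetchall())   # () -- a's insert is not committed yet

a.commit()                # commit on the writing connection...
b.rollback()              # ...and end b's old snapshot/transaction
cur_b.execute("SELECT * FROM t WHERE x = 1")
print(cur_b.fetchall())   # the row is visible now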

local variable 'servers' referenced before assignment

def websvc(currency):
    db = MySQLdb.connect("localhost", "root", "aqw", "PFE_Project")
    cursor = db.cursor()
    sql = "SELECT * FROM myform_composantsserveur"
    try:
        cursor.execute(sql)
        results = cursor.fetchall()
        currency_in = currency
        req = urllib2.urlopen('http://rate-exchange.appspot.com/currency?from=USD&to=%s') % (currency_in)
        req1 = req.read()
        rate = int(req1['rate'])
        # rate = 0.77112893299999996
        servers = []
        for row in results:
            result = {}
            result['1'] = row[1]
            result['3'] = int(row[2])
            result['4'] = int(row[3])
            result['5'] = int(row[4])
            result['6'] = row[5]
            result['7'] = int(row[6])
            result['8'] = row[7]
            result['9'] = row[8]
            p = rate * calculations_metric(int(row[2]), int(row[3]), int(row[4]), int(row[6]), row[7])
            result['2'] = p
            keys = result.keys()
            keys.sort()
            servers.append(result)
    except:
        print "Error: unable to fetch data"
    db.close()
    return servers
but I get this error when running the code:
Exception Type: UnboundLocalError
Exception Value: local variable 'servers' referenced before assignment
Exception Location: /home/amine/PFE Directory/mysite1/myform/Webservice.py in websvc, line 43
Python Executable: /usr/bin/python2.7
This code worked normally before I added a parameter to this function.
Your code is not able to reach the servers initialization, which is why you get the error. Simply move the initialization before the try..except block. Change it this way:
def websvc(currency):
    db = MySQLdb.connect("localhost", "root", "aqw", "PFE_Project")
    cursor = db.cursor()
    sql = "SELECT * FROM myform_composantsserveur"
    servers = []
    try:
        cursor.execute(sql)
        results = cursor.fetchall()
        currency_in = currency
        # note: the URL must be filled in *before* calling urlopen, and the JSON body parsed
        req = urllib2.urlopen('http://rate-exchange.appspot.com/currency?from=USD&to=%s' % currency_in)
        rate = json.load(req)['rate']  # requires `import json`; rate is a float, e.g. 0.771128933
        for row in results:
            result = {}
            result['1'] = row[1]
            result['3'] = int(row[2])
            result['4'] = int(row[3])
            result['5'] = int(row[4])
            result['6'] = row[5]
            result['7'] = int(row[6])
            result['8'] = row[7]
            result['9'] = row[8]
            p = rate * calculations_metric(int(row[2]), int(row[3]), int(row[4]), int(row[6]), row[7])
            result['2'] = p
            keys = result.keys()
            keys.sort()
            servers.append(result)
    except:
        print "Error: unable to fetch data"
    db.close()
    return servers
I see the problem now that you have edited the question to add the missing parts: it's the exception handler.
If an error occurs after try and before servers = [], execution jumps to the except clause, then reaches return servers and fails.
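To make the mechanism concrete, here is a minimal repro of that control flow (not the OP's code, just the failure mode):

def websvc_demo():
    try:
        raise IOError("simulating urlopen failing")   # fails *before* the assignment below
        servers = []                                  # never reached
    except Exception:
        print("Error: unable to fetch data")
    return servers   # UnboundLocalError: local 'servers' was never assigned

websvc_demo()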
You might want to use a list(), instead of using a dict() to emulate a list ...
You can also keep the initialization inside the try block if, anywhere after the try block, you check the local variables for it. That is no game changer in this code, since creating a new empty list never fails, but the same pattern lets you open a connection inside the try block (so that failures are caught by the exception handler) and close that object in the finally block, without having to create an empty object before the try/except/finally block (tested).
def websvc(currency):
    db = MySQLdb.connect("localhost", "root", "aqw", "PFE_Project")
    cursor = db.cursor()
    sql = "SELECT * FROM myform_composantsserveur"
    try:
        servers = []
        cursor.execute(sql)
        results = cursor.fetchall()
        currency_in = currency
        # same fix as above: fill the URL in before calling urlopen, then parse the JSON body
        req = urllib2.urlopen('http://rate-exchange.appspot.com/currency?from=USD&to=%s' % currency_in)
        rate = json.load(req)['rate']  # requires `import json`
        for row in results:
            result = {}
            result['1'] = row[1]
            result['3'] = int(row[2])
            result['4'] = int(row[3])
            result['5'] = int(row[4])
            result['6'] = row[5]
            result['7'] = int(row[6])
            result['8'] = row[7]
            result['9'] = row[8]
            p = rate * calculations_metric(int(row[2]), int(row[3]), int(row[4]), int(row[6]), row[7])
            result['2'] = p
            keys = result.keys()
            keys.sort()
            servers.append(result)
    except:
        print "Error: unable to fetch data"
    db.close()
    if 'servers' in locals():  # servers is function-local, so check locals(), not globals()
        return servers
    else:
        return []
This is untested. If it crashes at servers.append(result), add the if 'servers' in locals(): check right before that line as well. That would bloat the try block, so I hope it is not needed; in my own example I also did not have to do that when I used the opened connection later in the try block.
Side remark: append() does not copy the list; it is amortized O(1), so it is fine even when you grow a large list.
