How to query MySQL Record value to Python Variables? - python
I want to give python variables with values that I fetch from MySQL database.
#!/usr/bin/python -u
# -*- coding: UTF-8 -*-
"""Fetch the most recent temperature reading from MySQL into a Python variable."""
import time
import datetime
import mysql.connector
import sys

db = mysql.connector.connect(
    host="localhost",
    user="admin",
    password="admin",
    db="testonly",
)
mycursor = db.cursor()

if __name__ == '__main__':
    temp = 0
    # `table` is a reserved word in MySQL, so the identifier must be
    # back-quoted (or the table renamed) for the query to run at all.
    mycursor.execute(
        "SELECT temperature FROM `table` ORDER BY primarykey DESC LIMIT 1;"
    )
    # fetchone() returns a plain tuple (or None when the table is empty),
    # so the row must be indexed by position, not by column name —
    # data['temperature'] raises "TypeError: tuple indices must be integers".
    data = mycursor.fetchone()
    if data is not None:
        temp = data[0]
        print(temp)
    # Release the cursor and connection instead of leaking them at exit.
    mycursor.close()
    db.close()
    sys.exit()
Then I have error like so:
File "test.py", line 28, in <module>
temp = data['temperature']
TypeError: tuple indices must be integers, not str
In which way I can give value to python variable for later usage?
By default, fetchone returns a tuple with the data from your database. As it currently stands, you need to access your data by index
temp = data[0]
If you want to access your data by the temperature key, you need to specify your cursor class:
from mysql.connector.cursor import MySQLCursorDict
...
mycursor = db.cursor(cursor_class=MySQLCursorDict)
...
temp = data['temperature']
Your object data is a tuple and can't be referenced like that. You need to use this:
temp = data[0]
Related
How to use python to insert a date formatted as a bigint datatype in MySQL DB?
I tried to use timestamp to forge a 14 digits int in form of YYYYMMDDHHMMSS in MySQL DB as primary key for later indexing. While 14 digits integer in MySQL is too large for int datatype, so I created table in bigint datatype. I have converted the timestamp into int with my python codes, but while I connected to MySQL, then trying insert 14 digits int into tables with bigint tables. It seems not working. And I queried tables with previous inserted rows for testing, it returns 20771213011177L. With a L in the end. So I noticed it might be the datatypes causing the issue. I try to convert the int into long, but the failure is still there. Is there a way I can insert 14 digits INT into MySQL DB with BIGINT datatype or I have to convert it into some other way? By the way, I'm using python 2. The table was created this way: CREATE TABLE IF NOT EXISTS table_1 ( table_primary_key BIGINT(14) PRIMARY KEY, value1 DOUBLE, value2 DOUBLE, event_date_time DATE ); Then here are codes: from __future__ import print_function from collections import OrderedDict import serial, struct, sys, time, json, subprocess import datetime import mysql.connector mydb = mysql.connector.connect( host = "localhost", user = "admin", password = "S2Bgz4AnVWLQQVun", db = "testdb" ) mycursor = mydb.cursor() JSON_FILE = '/home/yser/proofvalues.json' ... if __name__ == "__main__": currentDateTime = datetime.datetime.now() primaryKey = int(currentDateTime.strftime("%Y%m%d%H%M%S")) primaryKey_long = long(primaryKey) .. # sensor reads and process out with two working variables .. # This json is for proof values. 
# Load the rolling proof-value log; start fresh if the file doesn't exist yet.
try:
    with open(JSON_FILE) as json_data:
        data = json.load(json_data)
except IOError:
    data = []

# Keep the log bounded: drop the oldest entry once it exceeds 1000 rows.
if len(data) > 1000:
    data.pop(0)

# Append the new reading.
jsonrow = {
    'table_primary_key': primaryKey,
    'value1': values[0],
    'value2': values[1],
    'event_date_time': currentDateTime.strftime("%Y-%m-%d %H:%M:%S"),
}
data.append(jsonrow)

# BUG FIX: the updated log was built but never persisted — write it back,
# otherwise the load/trim/append above is dead work.
with open(JSON_FILE, 'w') as json_data:
    json.dump(data, json_data)

try:
    # Parameterized insert: the connector adapts the Python int to BIGINT,
    # so no manual long() conversion is needed (the trailing L seen in
    # query results is just Python 2's repr of a long, not bad data).
    mycursor.execute(
        "INSERT INTO table_1(table_primary_key, value1, value2, event_date_time) "
        "VALUES (%s, %s, %s, %s)",
        (primaryKey, values[0], values[1],
         currentDateTime.strftime("%Y-%m-%d %H:%M:%S")))
    mydb.commit()
except mysql.connector.Error:
    # Roll back only on database errors; the original bare `except:` would
    # also hide programming bugs such as NameError/IndexError.
    mydb.rollback()
sys.exit()
multithreading to load data into sqlite db
I'm downloading a data from an API and storing it in SQLite db. I want to implement the process using "multithreading". Can someone please help me with how to implement it. I found a library but getting an error. below is the code. import sqlite3 import os import pandas as pd from sodapy import Socrata import concurrent.futures dbPath = 'folder where db exists' dbName = 'db file name' ## Setup connection & cursor with the DB dbConn = sqlite3.connect(os.path.join(dbPath, dbName), check_same_thread=False) ## Setup the API and bring in the data client = Socrata("health.data.ny.gov", None) ## Define all the countys to be used in threading countys = [all 62 countys in New York] varDict = dict.fromkeys(countys, {}) strDataList = ['test_date', 'LoadDate'] intDataList = ['new_positives', 'cumulative_number_of_positives', 'total_number_of_tests', 'cumulative_number_of_tests'] def getData(county): ## Check if table exists print("Processing ", county) varDict[county]['dbCurs'] = dbConn.cursor() varDict[county]['select'] = varDict[county]['dbCurs'].execute('SELECT name FROM sqlite_master WHERE type="table" AND name=?', (county,) ) if not len(varDict[county]['select'].fetchall()): createTable(county) whereClause = 'county="'+county+'"' varDict[county]['results'] = client.get("xdss-u53e", where=whereClause) varDict[county]['data'] = pd.DataFrame.from_records(varDict[county]['results']) varDict[county]['data'].drop(['county'], axis=1, inplace=True) varDict[county]['data']['LoadDate'] = pd.to_datetime('now') varDict[county]['data'][strDataList] = varDict[county]['data'][strDataList].astype(str) varDict[county]['data']['test_date'] = varDict[county]['data']['test_date'].apply(lambda x: x[:10]) varDict[county]['data'][intDataList] = varDict[county]['data'][intDataList].astype(int) varDict[county]['data'] = varDict[county]['data'].values.tolist() ## Insert values into SQLite varDict[county]['sqlQuery'] = 'INSERT INTO ['+county+'] VALUES (?,?,?,?,?,?)' 
varDict[county]['dbCurs'].executemany(varDict[county]['sqlQuery'], varDict[county]['data']) dbConn.commit() # for i in dbCurs.execute('SELECT * FROM albany'): # print(i) def createTable(county): sqlQuery = 'CREATE TABLE ['+county+'] ( [Test Date] TEXT, [New Positives] INTEGER NOT NULL, [Cumulative Number of Positives] INTEGER NOT NULL, [Total Number of Tests Performed] INTEGER NOT NULL, [Cumulative Number of Tests Performed] INTEGER NOT NULL, [Load date] TEXT NOT NULL, PRIMARY KEY([Test Date]))' varDict[county]['dbCurs'].execute(sqlQuery) # for _ in countys: # getData(_) # x = countys[:5] with concurrent.futures.ThreadPoolExecutor() as executor: # results = [executor.submit(getData, y) for y in x] executor.map(getData, countys) getData is the function which brings in the data county wise and loads into the db. Countys is a list of all the countys. I am able to do it synchronously but would like to implement multithreading. The for loop to do it synchronously (which works) is for _ in countys: getData(_) The error message is ProgrammingError: SQLite objects created in a thread can only be used in that same thread. The object was created in thread id 8016 and this is thread id 19844.
You might find this useful: sqlite3.connect(":memory:", check_same_thread=False)
Last value from mySQL in python (Query)
Trying to get the last value from MySQL on Raspberry Pi. No idea why my simple code wont work, gives error at "execute() first" at row = cursor.fetchone(). Here is my code: # External module imports import time import os import datetime import MySQLdb # Connect to mysql db=MySQLdb.connect("localhost","zikmir","gforce","temp_database") # Prepair a cursor cursor=db.cursor() # Select three columns, id, time and temp from table time_temp cursor.execute = ("SELECT id, time, temp FROM time_temp") # ID is autoincremented value, time is in TIME and temp is float row = cursor.fetchone() # Trying to store the last result in variable row # Close cursor and database cursor.close() db.close()
Watch the stray = in cursor.execute = ("SELECT id, time, temp FROM time_temp") — it assigns to the attribute instead of calling the method. It should read cursor.execute("SELECT...")
Data type conversion via sql query
I have problem with data type conversion. Using django and pypyodbc lib I'm trying to recieive data from oracle DB (external) and save it into local app DB. import pypyodbc def get_data(request): conn = pypyodbc.connect("DSN=...") cursor = conn.cursor() cursor.execute("SELECT value FROM table") data = cursor.fetchall() for row in data: d = External_Data(first_val = row[0]) d.save() The output from value is "0,2" and I've received error message: could not convert string to float: b',02' When I changed sql statement to: SELECT cast(value as numeric(10,2) from table) I received error message: [<class 'decimal.ConversionSyntax'>] How to change that data to get float data and save it. I use DecimalField(max_digits=10, decimal_places=2) as model field.
I think this problem comes with implicit type change. when you get data from your get_data function, row[0] var in for loop is seemed like Bytes variable. So First of all, I recommend to check row[0]'s data type with print(type(row[0])). If result is Bytes, you can do like this: import pypyodbc def get_data(request): conn = pypyodbc.connect("DSN=...") cursor = conn.cursor() cursor.execute("SELECT value FROM table") data = cursor.fetchall() for row in data: data = float(str(row[0]).replace(',','.')) d = External_Data(first_val=data) d.save()
CSV - MYSQL Using Python
After reading several inputs I still can't get this to work. Most likely I'm doing it all wrong but I've tried several different approaches What I'm trying to do is extract data from a CSV and add it into my newly created database/table My csv input look like this NodeName,NeId,Object,Time,Interval,Direction,NeAlias,NeType,Position,AVG,MAX,MIN,percent_0-5,percent_5-10,percent_10-15,percent_15-20,percent_20-25,percent_25-30,percent_30-35,percent_35-40,percent_40-45,percent_45-50,percent_50-55,percent_55-60,percent_60-65,percent_65-70,percent_70-75,percent_75-80,percent_80-85,percent_85-90,percent_90-95,percent_95-100,IdLogNum,FailureDescription X13146PAZ,5002,1/11/100,2016-05-16 00:00:00,24,Near End,GE0097-TN01.1,AMM 20PB,-,69684,217287,772,10563,8055,10644,15147,16821,13610,7658,2943,784,152,20,3,0,0,0,0,0,0,0,0,0,- ... X13146PAZ,5002,1/11/102,2016-05-16 00:00:00,24,Near End,GE0097-TN01.1,AMM 20PB,-,3056,28315,215,86310,90,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,- ... X13146PAZ,5002,1/11/103,2016-05-16 00:00:00,24,Near End,GE0097-TN01.1,AMM 20PB,-,769,7195,11,86400,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,- The mysql table is created but possibly that might be the issue as some ar varchar columns and some are integer columns My server is a Ubuntu if that is of any use My Code # -*- coding: utf-8 -*- #Imports from datetime import date, timedelta import sys import MySQLdb as mdb import csv import os #Vars Yesterday = date.today() - timedelta(1) #Opening document RX_Document = open('./reports/X13146PAZ_TN_WAN_ETH_BAND_RX_' + Yesterday.strftime("%Y%m%d") + "_231500.csv" , 'r') RX_Document_Str = './reports/X13146PAZ_TN_WAN_ETH_BAND_RX_' + Yesterday.strftime("%Y%m%d") + "_231500.csv" csv_data = csv.reader(file(RX_Document_Str)) con = mdb.connect('localhost', 'username', 'password','tn_rx_utilization'); counter = 0 for row in csv_data: if counter == 0: print row continue counter = 1 if counter == 1: cur = con.cursor() cur.execute('INSERT INTO RX_UTIL(NodeName, NeId, Object, 
Time, Interval1,Direction,NeAlias,NeType,Position,AVG,MAX,MIN,percent_5-10,percent_10-15,percent_15-20,percent_20-25,percent_25-30,percent_30-35,percent_35-40,percent_40-45,percent_45-50,percent_50-55,percent_55-60,percent_60-65,percent_65-70,percent_70-75,percent_75-80,percent_80-85,percent_85-90,percent_90-95,percent_95-100,IdLogNum,FailureDescription)' 'VALUES("%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s","%s")',tuple(row[:34])) con.commit() #cur.execute("SELECT VERSION()") #ver = cur.fetchone() con.commit() con.close()
You should not put the placeholder %s in quotes ": cur.execute('''INSERT INTO RX_UTIL(NodeName, NeId, Object, Time, Interval1,Direction, NeAlias,NeType,Position,AVG,MAX,MIN,"percent_5-10","percent_10-15", "percent_15-20","percent_20-25","percent_25-30","percent_30-35", "percent_35-40","percent_40-45","percent_45-50","percent_50-55", "percent_55-60","percent_60-65","percent_65-70","percent_70-75", "percent_75-80","percent_80-85","percent_85-90","percent_90-95", "percent_95-100",IdLogNum,FailureDescription) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)''', tuple(row[:33]))
You are missing Percent_0-5 from your Insert Remove the quotes from the %s references, this needs to be in String format, but the underlying data type will be passed. There may be issues with datatype resulting from the csv reader. Have Python eval() the csv data to alter type as an INT. Here is some more information from another post: Read data from csv-file and transform to correct data-type cur.execute('INSERT INTO RX_UTIL(NodeName, NeId, Object, Time, Interval1,Direction,NeAlias,NeType,Position,AVG,MAX,MIN,percent_0-5,percent_5-10,percent_10-15,percent_15-20,percent_20-25,percent_25-30,percent_30-35,percent_35-40,percent_40-45,percent_45-50,percent_50-55,percent_55-60,percent_60-65,percent_65-70,percent_70-75,percent_75-80,percent_80-85,percent_85-90,percent_90-95,percent_95-100,IdLogNum,FailureDescription)' 'VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',tuple(row[:34]))