Insert a dict into a SQL table - Python

I want to insert some data into my SQL database, but I'm having trouble because there are a lot of columns, so I would have to write a lot of parameters after VALUES. I have a table with all the attributes from the JSON file plus a player_id which I add myself.
#Gamelogs for players and Teams
import requests
import json
import psycopg2
# Connect to your postgres DB
conn = psycopg2.connect("dbname=NBA user=postgres password=********")
# Open a cursor to perform database operations
cur = conn.cursor()
cur.execute('CREATE TABLE player_logs("player_id" int,"GameId" int,"Date" int,"Team" VARCHAR(10),"Opponent" VARCHAR(10),"Minutes" int,"Arc3Assists" int,"Arc3FGA" int,"Arc3Frequency" int,"AssistPoints" int,"Assists" int,"AtRimAssists" int,"AtRimFG3AFrequency" int,"Avg2ptShotDistance" int,"Avg3ptShotDistance" int,"BadPassOutOfBoundsTurnovers" int,"BadPassSteals" int,"BadPassTurnovers" int,"Corner3FGA" int,"Corner3Frequency" int,"DeadBallTurnovers" int,"DefArc3ReboundPct" int,"DefFGReboundPct" int,"DefPoss" int,"DefRebounds" int,"DefThreePtReboundPct" int,"DefThreePtRebounds" int,"EfgPct" int,"FG2A" int,"FG2M" int,"FG3A" int,"FG3APct" int,"FTA" int,"Fg2Pct" int,"FirstChancePoints" int,"Fouls" int,"FoulsDrawn" int,"FtPoints" int,"LiveBallTurnoverPct" int,"LiveBallTurnovers" int,"LongMidRangeAccuracy" int,"LongMidRangeAssists" int,"LongMidRangeFGA" int,"LongMidRangeFGM" int,"LongMidRangeFrequency" int,"Loose Ball Fouls" int,"LostBallTurnovers" int,"NonHeaveArc3FGA" int,"OffFGReboundPct" int,"OffPoss" int,"OffRebounds" int,"OffShortMidRangeReboundPct" int,"OffTwoPtReboundPct" int,"OffTwoPtRebounds" int,"OnDefRtg" int,"OnOffRtg" int,"PenaltyArc3FGA" int,"PenaltyArc3Frequency" int,"PenaltyDefPoss" int,"PenaltyEfgPct" int,"PenaltyFG2A" int,"PenaltyFG2M" int,"PenaltyFG3A" int,"PenaltyFg2Pct" int,"PenaltyOffPoss" int,"PenaltyOffPossExcludingTakeFouls" int,"PenaltyOffPossPct" int,"PenaltyPoints" int,"PenaltyPointsExcludingTakeFouls" int,"PenaltyPointsPct" int,"PenaltyShotQualityAvg" int,"PenaltyTsPct" int,"PenaltyTurnovers" int,"Period2Fouls2Minutes" int,"Period3Fouls3Minutes" int,"PlusMinus" int,"Points" int,"PtsUnassisted2s" int,"Rebounds" int,"SecondChanceOffPoss" int,"SelfOReb" int,"SelfORebPct" int,"ShootingFouls" int,"ShootingFoulsDrawnPct" int,"ShortMidRangeAccuracy" int,"ShortMidRangeAssists" int,"ShortMidRangeFGA" int,"ShortMidRangeFGM" int,"ShortMidRangeFrequency" int,"ShortMidRangeOffReboundedPct" int,"ShotQualityAvg" int,"Steals" int,"ThreePtAssists" int,"TotalPoss" int,"TsPct" int,"Turnovers" int,"TwoPtAssists" int,"TwoPtShootingFoulsDrawn" int,"TwoPtShootingFoulsDrawnPct" int,"UnblockedLongMidRangeAccuracy" int,"UnblockedShortMidRangeAccuracy" int,"Usage" int,"Arc3Accuracy" int,"Arc3FGM" int,"Arc3PctAssisted" int,"Assisted2sPct" int,"Assisted3sPct" int,"AtRimAccuracy" int,"AtRimFGA" int,"AtRimFGM" int,"AtRimFrequency" int,"AtRimOffReboundedPct" int,"AtRimPctBlocked" int,"Blocked2s" int,"BlockedShortMidRange" int,"Blocks" int,"BlocksRecoveredPct" int,"Corner3Assists" int,"DefAtRimReboundPct" int,"DefLongMidRangeReboundPct" int,"DefShortMidRangeReboundPct" int,"DefTwoPtReboundPct" int,"DefTwoPtRebounds" int,"FG2APctBlocked" int,"FG3M" int,"Fg2aBlocked" int,"Fg3Pct" int,"LongMidRangeOffReboundedPct" int,"LostBallSteals" int,"NonHeaveArc3Accuracy" int,"NonHeaveArc3FGM" int,"NonHeaveFg3Pct" int,"NonPutbacksAssisted2sPct" int,"NonShootingFoulsDrawn" int,"NonShootingPenaltyNonTakeFoulsDrawn" int,"OffLongMidRangeReboundPct" int,"Offensive Fouls Drawn" int,"PenaltyArc3Accuracy" int,"PenaltyArc3FGM" int,"PenaltyAtRimAccuracy" int,"PenaltyAtRimFGA" int,"PenaltyAtRimFGM" int,"PenaltyAtRimFrequency" int,"PenaltyFG3M" int,"PenaltyFg3Pct" int,"PenaltyFtPoints" int,"PtsAssisted2s" int,"PtsAssisted3s" int,"PtsPutbacks" int,"PtsUnassisted3s" int,"RecoveredBlocks" int,"SecondChanceArc3FGA" int,"SecondChanceArc3Frequency" int,"SecondChanceEfgPct" int,"SecondChanceFG2A" int,"SecondChanceFG2M" int,"SecondChanceFG3A" int,"SecondChanceFg2Pct" int,"SecondChancePoints" int,"SecondChancePointsPct" 
int,"SecondChanceShotQualityAvg" int,"SecondChanceTsPct" int,"ShortMidRangePctAssisted" int,"ShortMidRangePctBlocked" int,"ThreePtShootingFoulsDrawn" int,"ThreePtShootingFoulsDrawnPct" int,"UnblockedArc3Accuracy" int,"UnblockedAtRimAccuracy" int,"OffArc3ReboundPct" int,"OffThreePtReboundPct" int,"OffThreePtRebounds" int,"Offensive Fouls" int,"Corner3Accuracy" int,"Corner3FGM" int,"ThreePtOffReboundedPct" int,"UnblockedCorner3Accuracy" int,"DefFTReboundPct" int,"FTDefRebounds" int,"Technical Free Throw Trips" int,"BlockedAtRim" int,"LostBallOutOfBoundsTurnovers" int,"OffAtRimReboundPct" int,"BlockedLongMidRange" int,"Charge Fouls Drawn" int,"LongMidRangePctAssisted" int,"NonShootingPenaltyNonTakeFouls" int,"SecondChanceTurnovers" int,"Travels" int,"SecondChanceAtRimFGA" int,"SecondChanceAtRimFrequency" int,"Clear Path Fouls" int,"DefCorner3ReboundPct" int,"HeaveAttempts" int,"LongMidRangePctBlocked" int,"2pt And 1 Free Throw Trips" int,"AtRimPctAssisted" int,"Period3Fouls4Minutes" int,"Period4Fouls4Minutes" int,"Charge Fouls" int,"Loose Ball Fouls Drawn" int,"PeriodOTFouls4Minutes" int,"SecondChanceAtRimAccuracy" int,"SecondChanceAtRimFGM" int,"PenaltyCorner3FGA" int,"PenaltyCorner3Frequency" int,"Corner3PctAssisted" int,"SecondChanceFtPoints" int,"OffCorner3ReboundPct" int,"SecondChanceArc3Accuracy" int,"SecondChanceArc3FGM" int,"SecondChanceFG3M" int,"SecondChanceFg3Pct" int,"3pt And 1 Free Throw Trips" int,"Defensive 3 Seconds Violations" int,"Period4Fouls5Minutes" int,"StepOutOfBoundsTurnovers" int,"Period1Fouls2Minutes"int)')
x = 'https://api.pbpstats.com/get-all-players-for-league/nba'
headers = {'user-agent': 'Chrome/88.0.4324.190'}
jsonData1 = requests.get(x, headers=headers).json() # Player id and name
EntityId = json.loads(json.dumps(jsonData1)[12:-1])
SeasonType = {'R':'Regular+Season','P':'Playoff+Season','A':'All'}
EntityType = {'P':'Player','T':'Team'}
Season = {
'2008-09',
'2009-10',
'2010-11',
'2011-12',
'2012-13',
'2013-14',
'2014-15',
'2015-16',
'2016-17',
'2017-18',
'2018-19',
'2019-20',
'2020-21'
}
def log (S:Season,ST:SeasonType,EI:EntityId,ET:EntityType):
url = 'https://api.pbpstats.com/get-game-logs/nba'
payload = {
'Season': S,
'SeasonType': ST,
'EntityId': EI,
'EntityType': ET
}
r = requests.get(url, headers=headers, params=payload).json()
if r == {'error': 'no results'} :
return()
else :
for c in r['multi_row_table_data']:
j = {'Player_id':EI}
c.update(j)
cur.execute('INSERT INTO player_log (Player_id,GameId,Date,Team,Opponent,Minutes,Arc3Assists,Arc3FGA,Arc3Frequency,AssistPoints,Assists,AtRimAssists,AtRimFG3AFrequency,Avg2ptShotDistance,Avg3ptShotDistance,BadPassOutOfBoundsTurnovers,BadPassSteals,BadPassTurnovers,Corner3FGA,Corner3Frequency,DeadBallTurnovers,DefArc3ReboundPct,DefFGReboundPct,DefPoss,DefRebounds,DefThreePtReboundPct,DefThreePtRebounds,EfgPct,FG2A,FG2M,FG3A,FG3APct,FTA,Fg2Pct,FirstChancePoints,Fouls,FoulsDrawn,FtPoints,LiveBallTurnoverPct,LiveBallTurnovers,LongMidRangeAccuracy,LongMidRangeAssists,LongMidRangeFGA,LongMidRangeFGM,LongMidRangeFrequency,Loose_Ball_Fouls,LostBallTurnovers,NonHeaveArc3FGA,OffFGReboundPct,OffPoss,OffRebounds,OffShortMidRangeReboundPct,OffTwoPtReboundPct,OffTwoPtRebounds,OnDefRtg,OnOffRtg,PenaltyArc3FGA,PenaltyArc3Frequency,PenaltyDefPoss,PenaltyEfgPct,PenaltyFG2A,PenaltyFG2M,PenaltyFG3A,PenaltyFg2Pct,PenaltyOffPoss,PenaltyOffPossExcludingTakeFouls,PenaltyOffPossPct,PenaltyPoints,PenaltyPointsExcludingTakeFouls,PenaltyPointsPct,PenaltyShotQualityAvg,PenaltyTsPct,PenaltyTurnovers,Period2Fouls2Minutes,Period3Fouls3Minutes,PlusMinus,Points,PtsUnassisted2s,Rebounds,SecondChanceOffPoss,SelfOReb,SelfORebPct,ShootingFouls,ShootingFoulsDrawnPct,ShortMidRangeAccuracy,ShortMidRangeAssists,ShortMidRangeFGA,ShortMidRangeFGM,ShortMidRangeFrequency,ShortMidRangeOffReboundedPct,ShotQualityAvg,Steals,ThreePtAssists,TotalPoss,TsPct,Turnovers,TwoPtAssists,TwoPtShootingFoulsDrawn,TwoPtShootingFoulsDrawnPct,UnblockedLongMidRangeAccuracy,UnblockedShortMidRangeAccuracy,Usage,Arc3Accuracy,Arc3FGM,Arc3PctAssisted,Assisted2sPct,Assisted3sPct,AtRimAccuracy,AtRimFGA,AtRimFGM,AtRimFrequency,AtRimOffReboundedPct,AtRimPctBlocked,Blocked2s,BlockedShortMidRange,Blocks,BlocksRecoveredPct,Corner3Assists,DefAtRimReboundPct,DefLongMidRangeReboundPct,DefShortMidRangeReboundPct,DefTwoPtReboundPct,DefTwoPtRebounds,FG2APctBlocked,FG3M,Fg2aBlocked,Fg3Pct,LongMidRangeOffReboundedPct,LostBallSteals,NonHeaveArc3Accuracy,NonHeaveArc3FGM,NonHeaveFg3Pct,NonPutbacksAssisted2sPct,NonShootingFoulsDrawn,NonShootingPenaltyNonTakeFoulsDrawn,OffLongMidRangeReboundPct,Offensive_Fouls_Drawn,PenaltyArc3Accuracy,PenaltyArc3FGM,PenaltyAtRimAccuracy,PenaltyAtRimFGA,PenaltyAtRimFGM,PenaltyAtRimFrequency,PenaltyFG3M,PenaltyFg3Pct,PenaltyFtPoints,PtsAssisted2s,PtsAssisted3s,PtsPutbacks,PtsUnassisted3s,RecoveredBlocks,SecondChanceArc3FGA,SecondChanceArc3Frequency,SecondChanceEfgPct,SecondChanceFG2A,SecondChanceFG2M,SecondChanceFG3A,SecondChanceFg2Pct,SecondChancePoints,SecondChancePointsPct,SecondChanceShotQualityAvg,SecondChanceTsPct,ShortMidRangePctAssisted,ShortMidRangePctBlocked,ThreePtShootingFoulsDrawn,ThreePtShootingFoulsDrawnPct,UnblockedArc3Accuracy,UnblockedAtRimAccuracy,OffArc3ReboundPct,OffThreePtReboundPct,OffThreePtRebounds,Offensive_Fouls,Corner3Accuracy,Corner3FGM,ThreePtOffReboundedPct,UnblockedCorner3Accuracy,DefFTReboundPct,FTDefRebounds,Technical_Free_Throw_Trips,BlockedAtRim,LostBallOutOfBoundsTurnovers,OffAtRimReboundPct,BlockedLongMidRange,Charge_Fouls_Drawn,LongMidRangePctAssisted,NonShootingPenaltyNonTakeFouls,SecondChanceTurnovers,Travels,SecondChanceAtRimFGA,SecondChanceAtRimFrequency,Clear_Path_Fouls,DefCorner3ReboundPct,HeaveAttempts,LongMidRangePctBlocked,"2pt_And_1_Free_Throw_Trips",AtRimPctAssisted,Period3Fouls4Minutes,Period4Fouls4Minutes,Charge_Fouls,Loose_Ball_Fouls_Drawn,PeriodOTFouls4Minutes,SecondChanceAtRimAccuracy,SecondChanceAtRimFGM,PenaltyCorner3FGA,PenaltyCorner3Frequency,Corner3PctAssisted,SecondChanceFtPoints,OffCorn
er3ReboundPct,SecondChanceArc3Accuracy,SecondChanceArc3FGM,SecondChanceFG3M,SecondChanceFg3Pct,"3pt_And_1_Free_Throw_Trips",Defensive_3_Seconds_Violations,Period4Fouls5Minutes,StepOutOfBoundsTurnovers,Period1Fouls2Minutes) VALUES',
c)
return()
y=log('2020-21','Regular+Season','101108','Player')
conn.commit()
conn.close()
cur.close()
So I was wondering if I could insert the data so that each value matches its key and the column name. Also, the table and the dict aren't ordered the same way, if that makes a difference.

This is fairly simple to do by adopting two helper libraries: pandas and preql.
You can use pandas to load the json into a single dataframe, and then use preql to import it into the database.
Here is runnable code demonstrating how to do it:
import requests
import pandas as pd
from preql import Preql
headers = {'user-agent': 'Chrome/88.0.4324.190'}
def log(S, ST, EI, ET):
    url = 'https://api.pbpstats.com/get-game-logs/nba'
    payload = {
        'Season': S,
        'SeasonType': ST,
        'EntityId': EI,
        'EntityType': ET
    }
    r = requests.get(url, headers=headers, params=payload).json()
    if r == {'error': 'no results'}:
        return
    else:
        return [{'Player_id': EI, **d} for d in r['multi_row_table_data']]
rows=log('2020-21','Regular+Season','101108','Player')
df = pd.DataFrame.from_dict(rows)
print("Dataframe shape:", df.shape) # (50, 218)
p = Preql() # For postgres use: p = Preql("postgres://user:pass#server")
p.import_pandas(my_table=df)
print('SQL columns:', p('count(columns(my_table))')) # 219 - includes id
print('SQL rows:', p('count(my_table)')) # 50
Note that this code example is currently using Python's built-in Sqlite, but you can easily make it work with postgres by providing Preql with the postgres URL, as the comment shows.
Install them with pip install pandas preql-lang

This might not be the best solution, but I wrote a small function that just converts the dict into a parameterized SQL statement.
def insert_into_table_query(table, data):
    col_names = list(data.keys())
    task = tuple(data.values())
    col_str = ', '.join(str(item) for item in col_names)
    col_str = '(' + col_str + ')'
    value_str = ', '.join('%s' for item in task)
    value_str = '(' + value_str + ');'
    sql = 'INSERT INTO {tn} '.format(tn=table) + col_str + ' VALUES ' + value_str
    return (sql, task)
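For example, with the cursor from the question it could be used like this (a sketch: it assumes cur/conn are the open psycopg2 cursor and connection from the question and c is one of the game-log dicts with 'Player_id' added; the call itself is illustrative):
# Sketch: build the statement from the dict's keys and bind its values.
sql, params = insert_into_table_query('player_logs', c)
cur.execute(sql, params)   # psycopg2 substitutes each %s with the matching value
conn.commit()
Since the column list comes straight from the dict's keys, keys that contain spaces (e.g. "Loose Ball Fouls") would still need quoting or renaming, but the values themselves are passed as bound parameters rather than interpolated into the SQL string.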


Matching thousands of rows takes too much time with Pandas

I receive every day a report with some values and I have to match postal codes from countries all over the world to get the right region. Then I upload the result in my Django app.
Here's a look at my report:
Order Number | Date       | City   | Postal code
930276       | 27/09/2022 | Madrid | cp: 28033
929670       | 27/09/2022 | Lisboa | cp: 1600-812
I have thousands of rows like this. The objective is to retrieve the region in ISO 3166-2 format. To help me, I went to the Geonames page and downloaded each country's information (for example "FR.txt", "ES.txt", ...).
Because these are huge txt files, I chose to store them on an S3 server.
Here is what I tried:
def access_scaleway(region_name, endpoint_url, access_key, secret_key):
    """ Accessing Scaleway Bucket """
    scaleway = boto3.client('s3', region_name=region_name, endpoint_url=endpoint_url, aws_access_key_id=access_key,
                            aws_secret_access_key=secret_key)
    return scaleway

def get_region_code_accessing_scaleway(countries, regions):
    ''' Retrieves the region code from the region name. '''
    list_countries = countries
    list_regions = regions
    list_regions_codes = []
    scaleway_session = access_scaleway(region_name=settings.SCALEWAY_S3_REGION_NAME,
                                       endpoint_url=settings.SCALEWAY_S3_ENDPOINT_URL,
                                       access_key=settings.SCALEWAY_ACCESS_KEY_ID,
                                       secret_key=settings.SCALEWAY_SECRET_ACCESS_KEY)
    for country, region in zip(list_countries, list_regions):
        try:
            obj = scaleway_session.get_object(Bucket=settings.SCALEWAY_STORAGE_BUCKET_NAME, Key=f'countries/{country}.txt')
            df = pd.read_csv(io.BytesIO(obj['Body'].read()), sep='\t', header=None)
            df.columns = ['country code', 'postal code', 'place name', 'admin name1', 'admin code1', 'admin name2', 'admin code2', 'admin name3', 'admin code3', 'latitude', 'longitude', 'accuracy']
            df['postal code'] = df['postal code'].astype(str)
            df['postal code'] = df['postal code'].str.zfill(5)
            # Removing all spaces and special characters
            postal_code = re.sub("[^0-9^-]", '', region).strip()
            region_code = country + "-" + df[df['postal code'] == postal_code]['admin code1'].values[0]
            list_regions_codes.append(region_code)
        except AttributeError:
            list_regions_codes.append(None)
        except ValueError:
            list_regions_codes.append(None)
    return list_regions_codes
But it is way too slow. For a simple report of 1000 rows, it takes about 30 minutes.
My second try was to go with the OpenDataSoft public API. Here is what I tried:
def fetch_data(url, params, headers=None):
    response = requests.get(url=url, params=params, headers=headers)
    return response

def get_region_code_accessing_scaleway(countries, regions):
    ''' Retrieves the region code from the region name. '''
    list_countries = countries
    list_regions = regions
    list_regions_codes = []
    for country, region in zip(list_countries, list_regions):
        try:
            # Get the response from the API
            postal_code = re.sub("[^0-9^-]", '', region).strip()
            response = fetch_data(
                url="https://data.opendatasoft.com/api/v2/catalog/datasets/geonames-postal-code%40public/records?",
                params="select=country_code%2C%20postal_code%2C%20admin_code1&where=country_code%3D%22" + country + "%22%20and%20postal_code%3D%22" + postal_code + "%22")
            if response.status_code == 200:
                data = response.json()
                if len(data['records']) > 0:
                    list_regions_codes.append(country + "-" + data['records'][0]['record']['fields']['admin_code1'])
                else:
                    list_regions_codes.append(None)
            else:
                print('Error:', response.status_code)
                list_regions_codes.append(None)
        except (AttributeError, ValueError, KeyError):
            list_regions_codes.append(None)
    return list_regions_codes
But once again, it takes forever to get matching values.
The last thing I tried was pgeocode, but it was also too slow.
I don't understand why it takes so long, because the desired output is just this:
Order Number | Date       | City   | Postal code  | Region code
930276       | 27/09/2022 | Madrid | cp: 28033    | ES-MD
929670       | 27/09/2022 | Lisboa | cp: 1600-812 | PT-08
Do you have any idea how to speed up the process?
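One likely bottleneck in the first approach is that each country's file is downloaded from S3 and re-parsed for every single row of the report. A possible speedup (a sketch built on the question's own code; country_cache, load_country_series and get_region_codes_cached are illustrative names, not existing helpers) is to read each country's file once, index it by postal code, and do the per-row lookups in memory:
country_cache = {}  # country code -> Series mapping postal code to admin code1

def load_country_series(scaleway_session, country):
    """Download and parse a country's Geonames file only once, then reuse it."""
    if country not in country_cache:
        obj = scaleway_session.get_object(Bucket=settings.SCALEWAY_STORAGE_BUCKET_NAME, Key=f'countries/{country}.txt')
        df = pd.read_csv(io.BytesIO(obj['Body'].read()), sep='\t', header=None, dtype=str)
        df.columns = ['country code', 'postal code', 'place name', 'admin name1', 'admin code1', 'admin name2', 'admin code2', 'admin name3', 'admin code3', 'latitude', 'longitude', 'accuracy']
        df['postal code'] = df['postal code'].str.zfill(5)
        # index by postal code so each lookup is a dictionary-style access instead of a full scan
        country_cache[country] = df.drop_duplicates('postal code').set_index('postal code')['admin code1']
    return country_cache[country]

def get_region_codes_cached(scaleway_session, countries, regions):
    list_regions_codes = []
    for country, region in zip(countries, regions):
        postal_code = re.sub("[^0-9^-]", '', region).strip()
        try:
            list_regions_codes.append(country + "-" + load_country_series(scaleway_session, country).loc[postal_code])
        except (KeyError, TypeError):
            list_regions_codes.append(None)
    return list_regions_codes
With only a handful of distinct countries per report, S3 is then hit a few times instead of once per row, which is usually where most of the 30 minutes goes.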

Python mysql code to insert dictionary containing an Array

I have a dictionary like this;
{'name': '0004', 'encodings': array([-2.05818519e-01, 1.50254071e-01, 6.18976653e-02, -4.57169749e-02,
-1.07391022e-01, 5.82340732e-02, 1.71395876e-02, -6.04623035e-02,
1.16265789e-01, -1.24150608e-02, 2.55038321e-01, 2.44104303e-03,
-2.83989906e-01, -7.16208220e-02, -1.18346401e-01, 6.68070763e-02,
-1.55324042e-01, -1.11675814e-01, -1.44206494e-01, -2.48661116e-02,
4.79197986e-02, -3.35404947e-02, -2.06724089e-02, 5.70063107e-02,
-1.29669383e-01, -2.63163120e-01, -2.25746073e-04, -1.47813573e-01,
6.61746860e-02, -2.05630586e-01, -2.89494134e-02, -8.06591734e-02,
-1.74903452e-01, -1.17690712e-01, -8.54253620e-02, 1.46108493e-03,
-7.83449411e-03, -7.44407028e-02, 2.03817844e-01, -4.55042198e-02,
-1.86186373e-01, -1.54956458e-02, 4.17447761e-02, 3.07781637e-01,
1.80454239e-01, 1.86630823e-02, 5.65212369e-02, -9.69169587e-02,
1.39696896e-01, -2.83250719e-01, -3.60675156e-04, 1.29852593e-01,
1.69919491e-01, 2.47877426e-02, 2.96924170e-02, -1.77335575e-01,
-2.26391852e-03, 1.38161883e-01, -1.87802404e-01, 1.11906916e-01,
4.17628363e-02, -6.03848845e-02, 4.18845750e-03, -5.18675111e-02,
2.16162637e-01, 4.84820902e-02, -1.24477677e-01, -8.92214701e-02,
1.42987236e-01, -1.07746974e-01, 1.67147964e-02, 1.29372582e-01,
-6.53869957e-02, -2.22480565e-01, -2.30741382e-01, 8.90350789e-02,
4.72032219e-01, 1.94205374e-01, -1.43704772e-01, 1.38391014e-02,
-2.22896904e-01, -4.31186557e-02, 2.22993959e-02, 5.01501486e-02,
-1.09650522e-01, 2.00281274e-02, -1.12852253e-01, 8.36469531e-02,
1.81203574e-01, -6.09542057e-03, 2.61690491e-03, 1.59612983e-01,
5.85054457e-02, -5.77166155e-02, 2.08678767e-02, 7.78703764e-02,
-1.74884677e-01, 4.89859655e-02, -4.20536213e-02, 2.84303911e-02,
5.88016734e-02, -9.87139642e-02, 1.04927823e-01, 4.22693267e-02,
-1.54544935e-01, 1.09288253e-01, -6.07409002e-03, -2.16740593e-02,
1.54772867e-03, -7.67392293e-02, -2.64447108e-02, 4.24488354e-03,
1.71442956e-01, -2.87759811e-01, 1.82956830e-01, 1.60583854e-01,
3.09638251e-02, 1.53580874e-01, 9.96040404e-02, 3.40097286e-02,
2.06465945e-02, 7.02249445e-03, -9.22998041e-02, -6.18107505e-02,
7.82211274e-02, -8.35414380e-02, 1.60512835e-01, -1.17839221e-02])}
And I used this SQL command to insert;
INSERT INTO image ( `name`, `encodings` ) VALUES ( '0004', '[-2.05818519e-01 1.50254071e-01 6.18976653e-02 -4.57169749e-02
-1.07391022e-01 5.82340732e-02 1.71395876e-02 -6.04623035e-02
1.16265789e-01 -1.24150608e-02 2.55038321e-01 2.44104303e-03
-2.83989906e-01 -7.16208220e-02 -1.18346401e-01 6.68070763e-02
-1.55324042e-01 -1.11675814e-01 -1.44206494e-01 -2.48661116e-02
4.79197986e-02 -3.35404947e-02 -2.06724089e-02 5.70063107e-02
-1.29669383e-01 -2.63163120e-01 -2.25746073e-04 -1.47813573e-01
6.61746860e-02 -2.05630586e-01 -2.89494134e-02 -8.06591734e-02
-1.74903452e-01 -1.17690712e-01 -8.54253620e-02 1.46108493e-03
-7.83449411e-03 -7.44407028e-02 2.03817844e-01 -4.55042198e-02
-1.86186373e-01 -1.54956458e-02 4.17447761e-02 3.07781637e-01
1.80454239e-01 1.86630823e-02 5.65212369e-02 -9.69169587e-02
1.39696896e-01 -2.83250719e-01 -3.60675156e-04 1.29852593e-01
1.69919491e-01 2.47877426e-02 2.96924170e-02 -1.77335575e-01
-2.26391852e-03 1.38161883e-01 -1.87802404e-01 1.11906916e-01
4.17628363e-02 -6.03848845e-02 4.18845750e-03 -5.18675111e-02
2.16162637e-01 4.84820902e-02 -1.24477677e-01 -8.92214701e-02
1.42987236e-01 -1.07746974e-01 1.67147964e-02 1.29372582e-01
-6.53869957e-02 -2.22480565e-01 -2.30741382e-01 8.90350789e-02
4.72032219e-01 1.94205374e-01 -1.43704772e-01 1.38391014e-02
-2.22896904e-01 -4.31186557e-02 2.22993959e-02 5.01501486e-02
-1.09650522e-01 2.00281274e-02 -1.12852253e-01 8.36469531e-02
1.81203574e-01 -6.09542057e-03 2.61690491e-03 1.59612983e-01
5.85054457e-02 -5.77166155e-02 2.08678767e-02 7.78703764e-02
-1.74884677e-01 4.89859655e-02 -4.20536213e-02 2.84303911e-02
5.88016734e-02 -9.87139642e-02 1.04927823e-01 4.22693267e-02
-1.54544935e-01 1.09288253e-01 -6.07409002e-03 -2.16740593e-02
1.54772867e-03 -7.67392293e-02 -2.64447108e-02 4.24488354e-03
1.71442956e-01 -2.87759811e-01 1.82956830e-01 1.60583854e-01
3.09638251e-02 1.53580874e-01 9.96040404e-02 3.40097286e-02
2.06465945e-02 7.02249445e-03 -9.22998041e-02 -6.18107505e-02
7.82211274e-02 -8.35414380e-02 1.60512835e-01 -1.17839221e-02]' );
But the encodings column is no longer an array, just text, so the face recognition app cannot use it for its checks.
How can I insert and retrieve the encodings from the MySQL database so they can be used for face recognition?
Thanks in advance.
Edit:
I added the dictionary like this:
Dictionary;
{'name': '0001', 'encodings': array([-2.05818519e-01, 1.50254071e-01, 6.18976653e-02, -4.57169749e-02,
-1.07391022e-01, 5.82340732e-02, 1.71395876e-02, -6.04623035e-02,
1.16265789e-01, -1.24150608e-02, 2.55038321e-01, 2.44104303e-03,
-2.83989906e-01, -7.16208220e-02, -1.18346401e-01, 6.68070763e-02,
-1.55324042e-01, -1.11675814e-01, -1.44206494e-01, -2.48661116e-02,
4.79197986e-02, -3.35404947e-02, -2.06724089e-02, 5.70063107e-02,
-1.29669383e-01, -2.63163120e-01, -2.25746073e-04, -1.47813573e-01,
6.61746860e-02, -2.05630586e-01, -2.89494134e-02, -8.06591734e-02,
-1.74903452e-01, -1.17690712e-01, -8.54253620e-02, 1.46108493e-03,
-7.83449411e-03, -7.44407028e-02, 2.03817844e-01, -4.55042198e-02,
-1.86186373e-01, -1.54956458e-02, 4.17447761e-02, 3.07781637e-01,
1.80454239e-01, 1.86630823e-02, 5.65212369e-02, -9.69169587e-02,
1.39696896e-01, -2.83250719e-01, -3.60675156e-04, 1.29852593e-01,
1.69919491e-01, 2.47877426e-02, 2.96924170e-02, -1.77335575e-01,
-2.26391852e-03, 1.38161883e-01, -1.87802404e-01, 1.11906916e-01,
4.17628363e-02, -6.03848845e-02, 4.18845750e-03, -5.18675111e-02,
2.16162637e-01, 4.84820902e-02, -1.24477677e-01, -8.92214701e-02,
1.42987236e-01, -1.07746974e-01, 1.67147964e-02, 1.29372582e-01,
-6.53869957e-02, -2.22480565e-01, -2.30741382e-01, 8.90350789e-02,
4.72032219e-01, 1.94205374e-01, -1.43704772e-01, 1.38391014e-02,
-2.22896904e-01, -4.31186557e-02, 2.22993959e-02, 5.01501486e-02,
-1.09650522e-01, 2.00281274e-02, -1.12852253e-01, 8.36469531e-02,
1.81203574e-01, -6.09542057e-03, 2.61690491e-03, 1.59612983e-01,
5.85054457e-02, -5.77166155e-02, 2.08678767e-02, 7.78703764e-02,
-1.74884677e-01, 4.89859655e-02, -4.20536213e-02, 2.84303911e-02,
5.88016734e-02, -9.87139642e-02, 1.04927823e-01, 4.22693267e-02,
-1.54544935e-01, 1.09288253e-01, -6.07409002e-03, -2.16740593e-02,
1.54772867e-03, -7.67392293e-02, -2.64447108e-02, 4.24488354e-03,
1.71442956e-01, -2.87759811e-01, 1.82956830e-01, 1.60583854e-01,
3.09638251e-02, 1.53580874e-01, 9.96040404e-02, 3.40097286e-02,
2.06465945e-02, 7.02249445e-03, -9.22998041e-02, -6.18107505e-02,
7.82211274e-02, -8.35414380e-02, 1.60512835e-01, -1.17839221e-02])}
columns = ', '.join("`" + str(x).replace('/', '_') + "`" for x in data.keys())
values = ', '.join("'" + str(x).replace('/', '_') + "'" for x in data.values())
sql = "INSERT INTO %s ( %s ) VALUES ( %s );" % (self.table_name, columns, values)
with self.connection.cursor() as cursor:
    cursor.execute(sql)
self.connection.commit()
You need to convert the string back into an array when retrieving the encoding from the database.
Here is a look at the sample Python code.
encoding = "[-2.05818519e-01 1.50254071e-01 6.18976653e-02 -4.57169749e-02 -1.07391022e-01 5.82340732e-02 1.71395876e-02 -6.04623035e-02 1.16265789e-01 -1.24150608e-02 2.55038321e-01 2.44104303e-03 -2.83989906e-01]"  # as returned by SELECT * FROM table
result = encoding[1:-1].split()  # strip the brackets and split on whitespace
print("The converted array is", result)
# ['-2.05818519e-01', '1.50254071e-01', '6.18976653e-02', '-4.57169749e-02', '-1.07391022e-01', '5.82340732e-02', '1.71395876e-02', '-6.04623035e-02', '1.16265789e-01', '-1.24150608e-02', '2.55038321e-01', '2.44104303e-03', '-2.83989906e-01']
Inserting the dict into the database should work if you convert the array to a string before inserting it, as in the edit above.
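A more robust option (a sketch, not from the original post) is to serialize the encodings to a JSON string before inserting and parse it back into a NumPy array when reading, so no whitespace parsing is needed. It assumes connection is an open MySQL connection (e.g. PyMySQL) and data is the dict from the question; the table and column names are the ones the question uses:
import json
import numpy as np

# Sketch: store the 128-float encoding as JSON text and rebuild the array on read.
encodings_json = json.dumps(data['encodings'].tolist())  # numpy array -> JSON list string
with connection.cursor() as cursor:
    cursor.execute(
        "INSERT INTO image (`name`, `encodings`) VALUES (%s, %s)",
        (data['name'], encodings_json))          # parameterized, no manual quoting
connection.commit()

# Later, when checking faces:
with connection.cursor() as cursor:
    cursor.execute("SELECT `encodings` FROM image WHERE `name` = %s", (data['name'],))
    row = cursor.fetchone()
encodings = np.array(json.loads(row[0]))  # back to a float array in the original order
The round-trip through JSON keeps the values as floats in their original order, which is what the face comparison functions expect.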

How to insert 1000 rows of random int values into a column in SQLite?

I'm a newbie in Python 3. My homework is to create an SQLite database with 10 tables; each table contains 50 columns and each column contains 1000 rows, with the data randomly generated using Python. I have almost finished.
My code:
import sqlite3
conn = sqlite3.connect('testmydb.db')
cur = conn.cursor()
for table_number in range(1, 11):
    cur.execute('''CREATE TABLE table''' + str(table_number) + '''(id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT)''')
    listOfColumns = ("column0",)
    for column_number in range(1, 49):
        newColumn = ("column" + str(column_number),)
        listOfColumns = listOfColumns + newColumn
    for column_number in listOfColumns:
        cur.execute('''ALTER TABLE table''' + str(table_number) + ''' ADD COLUMN %s TEXT''' % column_number)
conn.commit()
cur.close()
conn.close()
Now I want to insert 1000 rows into each column, but I'm confused about how to write the extra for loop for that. Can anyone make a suggestion?
The following should do what you want (see comments)
import sqlite3
import random
conn = sqlite3.connect('testmydb.db')
cur = conn.cursor()
# Only need to do this once
listOfColumns = ("column0",)
bindMarkers = ",?"  # ADDED to allow values to be bound; will be ,?,?,?, ........ 49 ?
for column_number in range(1, 49):
    newColumn = ("column" + str(column_number),)
    listOfColumns = listOfColumns + newColumn
    bindMarkers = bindMarkers + ",?"
for table_number in range(1, 11):
    cur.execute("DROP TABLE IF EXISTS table" + str(table_number))  # make it rerunnable
    cur.execute('''CREATE TABLE IF NOT EXISTS table''' + str(table_number) + '''(id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT)''')
    for column_number in listOfColumns:
        cur.execute('''ALTER TABLE table''' + str(table_number) + ''' ADD COLUMN %s TEXT''' % column_number)
    # The INSERT statement; note null means that the id will be automatically generated
    insertsql = "INSERT INTO table" + str(table_number) + " VALUES(null" + bindMarkers + ")"
    # print the INSERT SQL (just the once)
    if table_number == 1:
        print(insertsql)
    for row_number in range(1, 1001):
        # Generate a list of 49 random values
        listOfRandomValues = [random.randint(1, 999999999999) for i in range(49)]
        cur.execute(insertsql, listOfRandomValues)  # insert the row
    # extract the first 5 rows and print each row
    cursor = cur.execute("SELECT * FROM table" + str(table_number) + " LIMIT 5")
    result = "row in table table" + str(table_number) + " Data is "
    for row in cursor:
        print(row)
conn.commit()
cur.close()
conn.close()
This will produce output like the following (the first line is the first INSERT statement/SQL):
INSERT INTO table1 VALUES(null,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
(1, '208968800970', '673486951978', '416011320117', '257739455602', '161014001387', '66915092142', '192394825558', '894946418178', '147479449787', '429768915009', '343072031065', '312483697033', '38240897968', '179592184222', '517690986147', '401721693004', '760956848808', '787028914225', '658523299261', '923606731801', '740090529164', '169600507787', '441903806645', '82302358448', '250921627878', '542116452618', '998918595471', '775548995005', '733089506549', '957054106540', '449321507524', '798501631292', '409382414444', '945602662286', '706232454927', '930118739979', '691405693853', '201175361297', '513975533346', '16690109599', '592944414377', '948709328664', '490207084748', '406188522423', '799744354342', '474761616653', '314527920015', '94102072722', '912028741567')
(2, '172875509043', '126844020427', '423436418690', '973535472434', '171412421537', '693479106176', '909004577995', '920911700813', '605955273811', '325652512054', '94057263900', '45907520985', '64928934172', '301130729226', '103229253943', '114469347031', '551553752113', '626314462779', '22617947251', '997836163264', '585793592332', '620096766798', '565760327235', '348031514661', '871589505728', '58320228377', '179652288652', '977988196994', '742110712624', '201181530463', '816248034687', '22951611374', '723154858722', '289036915539', '997272483698', '61348539011', '977373908399', '668284539899', '55348735729', '263726052214', '662603583920', '790720286573', '487793507420', '883073500835', '519633722649', '383008255347', '30563959610', '617324332661', '89956476106')
(3, '253567041183', '70027774987', '535230659770', '191267720449', '791090949115', '399626615217', '649492276413', '594283985270', '983353743022', '713002984294', '982490173135', '109850128623', '571489216078', '900560015434', '729185220526', '362712800267', '619582132251', '990925729743', '144006421433', '790742578660', '64886161120', '266462916556', '89211644675', '941650491818', '878437527129', '827767387129', '899754797443', '280555144440', '623469334050', '882001652568', '395198811620', '393149546360', '509545198950', '534252806675', '582802496697', '674715538387', '748829323303', '296068248515', '573789396002', '84015250035', '963083904856', '677426863455', '173505995385', '569976297792', '643158854425', '273191627696', '676364784545', '536715691007', '678846958313')
(4, '575055534615', '179094408882', '418242646417', '258767847915', '533305509121', '800410396430', '416643709991', '453093467839', '352906227023', '711478657972', '542560050616', '477511637703', '464619274323', '438591712313', '293891594997', '638717557413', '796607432824', '845617819673', '682479247215', '687662681530', '682910774205', '547150987433', '645097550529', '781225444825', '498491871793', '280308928866', '386747319120', '175187502068', '554032903538', '906897892968', '847200546291', '724824936579', '257524554306', '341479642174', '628478037881', '41911000836', '487139622046', '698641404274', '300203051807', '321147725978', '201308004931', '324554566932', '54668008952', '799888599714', '544776279131', '851164639529', '1118079080', '993554994315', '97774308420')
(5, '263377483252', '535276579958', '434436394255', '235123585872', '886866465625', '83437890933', '546739192349', '832929945092', '889303183895', '517501283515', '386452334064', '437005515113', '567305852696', '254940127493', '158473804439', '714105412308', '887616841407', '873758857265', '59024734698', '495085412255', '757296111012', '438130715784', '661863799528', '370244296694', '559859930401', '409259131854', '72716791778', '900054227569', '897455645761', '254989679831', '46456169823', '597888422562', '581408791663', '191438417130', '468539979785', '998729241595', '596707251066', '731997835957', '432001941801', '351970232680', '602771773558', '793033654396', '205236245465', '547142878108', '973842386021', '742055066627', '455501634405', '130419180039', '870186517783')
(1, '472841964440', '177094420514', '859773622393', '943573354468', '909606787130', '278659426379', '129796913302', '67857238168', '104155180296', '581639712382', '451184580063', '917433785632', '226959780068', '190462507493', '256274613979', '919674630928', '976702823134', '121337013780', '254022515917', '293782992065', '903483153770', '147697931939', '279062893088', '553519369139', '962433270653', '640822114280', '816716757345', '999707836592', '697963179054', '104305203866', '735705858863', '617083342099', '262076004375', '797912340506', '205887749382', '576489282235', '705096989440', '670969562520', '649164826831', '311493582872', '760367591190', '749686855909', '819181100789', '466265188300', '304292298579', '420782152623', '854335337149', '916391611738', '964274785687')
(2, '621325506597', '776006955683', '137683264810', '351906945610', '682429690372', '965366508605', '666337420753', '453325880143', '70778770818', '103682937480', '868216544504', '229703959756', '41004116292', '507097353534', '871910281669', '251530835311', '836500603189', '601460038094', '897559700303', '681312522817', '161143454247', '553960203443', '777460295192', '458302954528', '977754347041', '892360041754', '681995024692', '248485864749', '348381577064', '450879805019', '650777503736', '353872867221', '97506344721', '747237255889', '455629065944', '861413783175', '214743871915', '77511793017', '621196858622', '825422146350', '489409477723', '908004452720', '238639741015', '426722798842', '980323652543', '561628376666', '838205614824', '784039262073', '949055065484')
(3, '736008123891', '923934389646', '546159245294', '429258073881', '583372466354', '50804206500', '273716995212', '733988654121', '788160350686', '749598895287', '551751993459', '916986772574', '622366294456', '687624270621', '185660393899', '329963428664', '928661078668', '875765821125', '754653923243', '151547845857', '248763933358', '636547599095', '87140063802', '267688269107', '224477253917', '641792646340', '59046381016', '103443043545', '485267444040', '387215340714', '268223896307', '480068950182', '225811319773', '492031230630', '502916805016', '514567127425', '178032451267', '750288734257', '825600642728', '641081438590', '207022050440', '902457228778', '115373751089', '348372424350', '768147081429', '715162751738', '210598155420', '196905259558', '873091126544')
(4, '560125266801', '378302831641', '471084702841', '679900688640', '201624340251', '909766550240', '687623074376', '116508086811', '217573740193', '378086229046', '466649195230', '932285473013', '648745964471', '968517127245', '748917121449', '224930472692', '698734544540', '793428186573', '153336974374', '24843476682', '42926459163', '503345524005', '116363947828', '524399560588', '238188045685', '3353134402', '97245283198', '780904780984', '768226492682', '337351478339', '761762114083', '4108216481', '715457129140', '718946387960', '808632491477', '283509135313', '750631442686', '302040053814', '354520401885', '30869550070', '831081853310', '317334330124', '175699898404', '316762996417', '144843539429', '647890863625', '500905345131', '686585819856', '439083530058')
(5, '786320993918', '418227705376', '222672045565', '50994821164', '445050766070', '655740733971', '144925180595', '178456995314', '968483620704', '217344736719', '659133382247', '699130444999', '645737723689', '211418136852', '977174813693', '404005933734', '416012774264', '498694089898', '286235598876', '105048705716', '745323502156', '22320974963', '287621972357', '484051431377', '677832782489', '175141638805', '652237666867', '633826915005', '826792363302', '181964153730', '549735148579', '820006084751', '622355043852', '615716362152', '337022948655', '280970738440', '264064973515', '550249406679', '912858473551', '542805313957', '43397863679', '257720759974', '189160263335', '265086252271', '692156831796', '860245023055', '769544988002', '856033591981', '865669688852')
(1, '29773154022', '105812125224', '923886735040', '494040618517', '406872772654', '964605045362', '483548207268', '222657267987', '728533595865', '427758006630', '250839721516', '246117222632', '625392752778', '372756660516', '276521371279', '677307428516', '434498176501', '757867858941', '568841625163', '315224423736', '939706907834', '567757610656', '977473375050', '476473505693', '921117900131', '344700573908', '350627473109', '569315794206', '780528101292', '957322180230', '952406583209', '435610932961', '463449885730', '174468401098', '916963726643', '193968348451', '297427605119', '481930164885', '685603984144', '543719297225', '612929787721', '475021539217', '176642603133', '74400339089', '95276914071', '808000358479', '79312180687', '502877681225', '659274942719')
..........
I still don't get what the bindMarkers variable is for and what if table_number == 1: print(insertsql) does.
First, the second point: the line if table_number == 1: print(insertsql) just prints out the INSERT statement to show what it looks like. It was only included for that and is not necessary. BUT, it's useful to know what the statement looks like in order to explain the ? placeholders and value binding.
So the INSERT statement is along the lines of
INSERT INTO tablex VALUES(null,?,? ....... (49 ?'s)
tablex where x represents 1-10
First null as per the comment allows SQLite to generate a unique value for the id column.
Each ? is a placeholder and will be replaced by a bound value. This technique prevents SQL injection.
bindMarkers is just a string that is generated with 1 ? per column so it's a string of 49 ?'s (easier than typing VALUES(null,?,?,?,?,? .....) and also more flexible/adaptable if the column number were to change).
You can see that the line listOfRandomValues = [random.randint(1, 999999999999) for i in range(49)] creates a list of 49 random values, each of which will be used to replace a single ? (the first value replaces the first ?, the second value replaces the second ?, and so on).
This is considered better practice than building a statement along the lines of
INSERT INTO tablex VALUES(null,'208968800970', '673486951978', '416011320117', '257739455602', '161014001387', '66915092142', '192394825558', '894946418178', '147479449787', '429768915009', '343072031065', '312483697033', '38240897968', '179592184222', '517690986147', '401721693004', '760956848808', '787028914225', '658523299261', '923606731801', '740090529164', '169600507787', '441903806645', '82302358448', '250921627878', '542116452618', '998918595471', '775548995005', '733089506549', '957054106540', '449321507524', '798501631292', '409382414444', '945602662286', '706232454927', '930118739979', '691405693853', '201175361297', '513975533346', '16690109599', '592944414377', '948709328664', '490207084748', '406188522423', '799744354342', '474761616653', '314527920015', '94102072722', '912028741567')
The statement itself is also shorter (i.e. 1 ? instead of up to 12 digits) and therefore less likely to cause issues with length limits.
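As a side note (not part of the answer above), once the statement uses ? placeholders you can also hand SQLite all 1000 rows for a table in one call with executemany, which avoids the Python-level loop of execute calls. A minimal sketch, reusing insertsql from the answer:
# Sketch: each inner list supplies the 49 bound values for one row
rows = [[random.randint(1, 999999999999) for _ in range(49)] for _ in range(1000)]
cur.executemany(insertsql, rows)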

Search for a keyword and print the output on new lines after it matches in Python

I have the list output below from a piece of Python code I'm working on. I'm specifically looking for the memberUid string and want every name after it to be printed on a new line, like:
anshulm
jiafan
and while printing these names, the printing should stop as soon as it reaches 'cn'.
[[('cn=oracle,ou=Group,ou=corp,ou=services,o=kk.com', {'description': ['oracle group'], 'businessCategory': ['Private'], 'objectClass': ['top', 'groupOfUniqueNames', 'posixGroup'], 'memberUid': ['anshulm', 'jiafan', 'manasij', 'asbisht', 'karnika', 'junle', 'amitsh', 'fuwei', 'dewansh', 'gouravr', 'harshitb', 'tandel', 'matte', 'izamir', 'elie', 'emiliano', 'mateuszw', 'theo', 'mahdi', 'hassan', 'gshruti', 'makhiles', 'prabhaka', 'shgarg', 'ritolia', 'wadhwani', 'steev', 'rtlsbld', 'nikhilb', 'fwang', 'ankitb', 'rtls', 'amitb', 'agautam', 'pratyush', 'hywang', 'dsouder', 'foutz', 'parimi', 'pradeepn', 'patrickg', 'pkunwar', 'tejinder', 'ramteke', 'jangra', 'kush', 'kundan', 'mohang', 'xiang', 'xinjia', 'anantv', 'christos', 'achugh', 'kbhatt', 'jroy', 'kusantos', 'kamleshm', 'iraa', 'indrajit'], 'gidNumber': ['9393'], 'owner': ['varshney'], 'cn': ['oracle']})]]
Below is my code which is yielding the above output:
import ldap
## first you must open a connection to the server
try:
    l = ldap.initialize("ldap://ldapserver:389")
    l.protocol_version = ldap.VERSION3
except ldap.LDAPError, e:
    print e
baseDN = "ou=group,ou=corp,ou=services,o=kk.com"
searchScope = ldap.SCOPE_SUBTREE
retrieveAttributes = None
searchFilter = raw_input("Enter the Group Name: ")
try:
    ldap_result_id = l.search(baseDN, searchScope, searchFilter, retrieveAttributes)
    result_set = []
    while 1:
        result_type, result_data = l.result(ldap_result_id, 0)
        if (result_data == []):
            break
        else:
            if result_type == ldap.RES_SEARCH_ENTRY:
                result_set.append(result_data)
    print result_set
except ldap.LDAPError, e:
    print e
You should extract the desired part from result_set, for example:
result_set[0][0][1]['memberUid']
and print it in any manner you like:
from pprint import pprint
pprint(result_set[0][0][1]['memberUid'])
or
print('\n'.join(name for name in result_set[0][0][1]['memberUid']))

Multiple arguments for Python MySQLdb clause

Below is a small subset of the data I'm working with. I can format the data any way I please. The data within the variable 'dc' is made up of the values 'id1' and 'id2'. What I want to do is be able to issue one SELECT statement for all of the values I have in 'dc'. For some reason, no matter what I try in the 'cursor.execute' statement or within the 'format_strings' variable I can't seem to get the proper code to be able to pass two variables to MySQL.
Comments/suggestions on how to format the data ('dc') or code to perform one SELECT statement would be very helpful.
results = ()
dc = ['103,4770634', '42,427752', '64,10122045', '42,13603629', '42,25516425', '103,2748102', '42,1966402', '42,30262834', '42,6667711', '18,13737683', '42,28921168', '42,26076925', '103,3733654', '42,23313527', '64,3307344', '103,3973533', '42,6360982', '48,11846077', '103,3775309', '64,10122050', '42,1965119', '103,4265810', '103,3971645', '103,4962583', '103,689615', '42,22834366', '103,761655', '95,1184', '64,9594482', '42,22855603', '48,8654764', '103,4226756', '42,23366982', '103,3897036', '42,11339650', '101,6369', '42,25830920', '103,5009291', '42,29238961', '59,6299475', '42,22931663', '42,25839056', '43,11864458', '43,41346192', '103,4261645', '42,3747082', '103,4795050', '42,9417503', '103,4245623', '42,61431911']
try:
    format_strings = ','.join(['%s%s'] * len(dc))
    cursor.execute("SELECT * FROM tbl1 WHERE id1=(%s) AND id2=(%s)" % format_strings, (dc))
    res = cursor.fetchall()
    results = results + res
except Exception, e:
    print e
UPDATE
Taking what #lecumia and #beroe posted below, I came up with the following; it's not as elegant and probably not super efficient, but it works.
results = ()
id1 = []
id2 = []
dc = ['103,4770634', '42,427752', '64,10122045', '42,13603629', '42,25516425']
for d in dc:
    id1.append(d.split(',')[0])
    id2.append(d.split(',')[1])
try:
    sql = "SELECT * FROM DomainEmails WHERE email_id IN (%s) AND domain_id IN (%s)"
    in_id1 = "'" + "', '".join(id1) + "'"
    in_id2 = "'" + "', '".join(id2) + "'"
    sql = sql % (in_id1, in_id2)
    cursor.execute(sql)
    res = cursor.fetchall()
    results = results + res
except Exception, e:
    print e
Actual Query
SELECT * FROM tbl1 WHERE id1 IN ('103', '42', '64', '42', '42') AND id2 IN ('4770634', '427752', '10122045', '13603629', '25516425')
Query Results
These match what I was expecting:
{'id1': 42L, 'id2': 427752L, 'firstseen': datetime.date(2010, 5, 6)}
{'id1': 42L, 'id2': 427752L, 'firstseen': datetime.date(2011, 5, 2)}
{'id1': 42L, 'id2': 13603629L, 'firstseen': datetime.date(2011, 3, 21)}
{'id1': 42L, 'id2': 13603629L, 'firstseen': datetime.date(2011, 4, 17)}
based on
Executing "SELECT ... WHERE ... IN ..." using MySQLdb
results = ()
dc = ['103,4770634', '42,427752', '64,10122045', '42,13603629', '42,25516425', '103,2748102', '42,1966402', '42,30262834', '42,6667711', '18,13737683', '42,28921168', '42,26076925', '103,3733654', '42,23313527', '64,3307344', '103,3973533', '42,6360982', '48,11846077', '103,3775309', '64,10122050', '42,1965119', '103,4265810', '103,3971645', '103,4962583', '103,689615', '42,22834366', '103,761655', '95,1184', '64,9594482', '42,22855603', '48,8654764', '103,4226756', '42,23366982', '103,3897036', '42,11339650', '101,6369', '42,25830920', '103,5009291', '42,29238961', '59,6299475', '42,22931663', '42,25839056', '43,11864458', '43,41346192', '103,4261645', '42,3747082', '103,4795050', '42,9417503', '103,4245623', '42,61431911']
try:
    sql = "SELECT * FROM tbl1 WHERE id1 in (%s) AND id2 in (%s)"
    in_ids = ', '.join(map(lambda x: '%s', dc))
    in_ids = in_ids % tuple(dc)
    sql = sql % (in_ids, in_ids)
    cursor.execute(sql)
    res = cursor.fetchall()
    results = results + res
except Exception, e:
    print e
Results
SELECT * FROM tbl1 WHERE id1 in (103,4770634, 42,427752, 64,10122045, 42,13603629, 42,25516425, 103,2748102, 42,1966402, 42,30262834, 42,6667711, 18,13737683, 42,28921168, 42,26076925, 103,3733654, 42,23313527, 64,3307344, 103,3973533, 42,6360982, 48,11846077, 103,3775309, 64,10122050, 42,1965119, 103,4265810, 103,3971645, 103,4962583, 103,689615, 42,22834366, 103,761655, 95,1184, 64,9594482, 42,22855603, 48,8654764, 103,4226756, 42,23366982, 103,3897036, 42,11339650, 101,6369, 42,25830920, 103,5009291, 42,29238961, 59,6299475, 42,22931663, 42,25839056, 43,11864458, 43,41346192, 103,4261645, 42,3747082, 103,4795050, 42,9417503, 103,4245623, 42,61431911) AND id2 in (103,4770634, 42,427752, 64,10122045, 42,13603629, 42,25516425, 103,2748102, 42,1966402, 42,30262834, 42,6667711, 18,13737683, 42,28921168, 42,26076925, 103,3733654, 42,23313527, 64,3307344, 103,3973533, 42,6360982, 48,11846077, 103,3775309, 64,10122050, 42,1965119, 103,4265810, 103,3971645, 103,4962583, 103,689615, 42,22834366, 103,761655, 95,1184, 64,9594482, 42,22855603, 48,8654764, 103,4226756, 42,23366982, 103,3897036, 42,11339650, 101,6369, 42,25830920, 103,5009291, 42,29238961, 59,6299475, 42,22931663, 42,25839056, 43,11864458, 43,41346192, 103,4261645, 42,3747082, 103,4795050, 42,9417503, 103,4245623, 42,61431911)
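For completeness, a fully parameterized alternative (a sketch, not taken from the answers above) is to compare both columns at once with row-value tuples, so each id1/id2 pair from dc stays together instead of being spread over two independent IN lists:
# Sketch: builds WHERE (id1, id2) IN ((%s, %s), (%s, %s), ...) and binds every value
pairs = [tuple(d.split(',')) for d in dc]                  # [('103', '4770634'), ('42', '427752'), ...]
placeholders = ', '.join(['(%s, %s)'] * len(pairs))
sql = "SELECT * FROM tbl1 WHERE (id1, id2) IN (%s)" % placeholders
params = [value for pair in pairs for value in pair]       # one bound value per %s
cursor.execute(sql, params)
results = results + cursor.fetchall()
Two separate IN lists can match combinations that were never in dc (for example an id1 paired with an id2 that belongs to a different entry); the tuple form only matches the exact pairs.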
