I'm new to Python. I need to connect to a MySQL database and get data. Previously, I fetched data easily in R using DBI/RMySQL, like this:
library(DBI)
library(RMySQL)
db_user = 'k'
db_password = 'F6'
db_name = 'meg'
db_table = 'dat'
db_host = 'my.g2s' # for local access
db_port = 3306
# 3. Read data from db
mydbV7 = dbConnect(MySQL(), user = db_user, password = db_password,
dbname = db_name, host = db_host, port = db_port)
sV7 = paste0("select * from ", db_table)
rsV7 = dbSendQuery(mydbV7, sV7)
df = fetch(rsV7, n = -1)
But when I tried to apply the same approach in Python, I got errors:
import pyodbc
>>> db_user = 'k'
>>> db_password = 'F6'
>>> db_name = 'meg'
>>> db_table = 'dat'
>>> db_host = 'my.g2s' # for local access
>>> db_port = 3306
>>> mydbV7 = dbConnect(MySQL(), user = db_user, password = db_password,
... dbname = db_name, host = db_host, port = db_port)
File "<stdin>", line 2
... dbname = db_name, host = db_host, port = db_port)
^
SyntaxError: positional argument follows keyword argument
How can I correctly get the data with Python 3.9?
As always, I appreciate any help.
As described in the MySQL documentation, you can import mysql.connector and then use:
cnx = mysql.connector.connect(user = 'scott',
password = 'password',
host = '127.0.0.1',
database = 'employees')
Of course, replace these values with your own.
cnx then represents the connection to your database.
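To mirror the R workflow end to end, here is a minimal sketch of running the query and fetching all rows with mysql.connector, using the credentials and table name ('dat') from the question:
import mysql.connector

cnx = mysql.connector.connect(user='k',
                              password='F6',
                              host='my.g2s',
                              port=3306,
                              database='meg')
cursor = cnx.cursor()
cursor.execute("SELECT * FROM dat")   # equivalent of dbSendQuery(...)
rows = cursor.fetchall()              # equivalent of fetch(rsV7, n = -1); list of row tuples
cursor.close()
cnx.close()
If you want a data frame like R gives you, pandas.read_sql("SELECT * FROM dat", cnx) returns the result as a DataFrame in one step.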
I am trying to run the following code, which executes with no issues. The SQL it produces is "INSERT INTO account_login (groupone_account_id, login_date) VALUES ('100', '10:10:00')", which has no syntax errors and executes successfully. But when I check the table, the id has not been inserted. SELECT queries complete successfully.
The reason I created database_connection is that it is an external connection and I wanted to isolate it so I could test the database connection more easily.
def create_groupone_account_login(groupone_account_id):
groupone_account_login_created = False
cursor = database_connection("cursor")
time = datetime.utcnow().isoformat()[:-3] + 'Z'
sql_create_account_login = "INSERT INTO account_login (groupone_account_id, login_date) VALUES ('%s', '%s')" % (
groupone_account_id, time)
cursor.execute(sql_create_account_login)
connection = database_connection("connection")
connection.commit()
cursor.close()
groupone_account_login_created = True
return groupone_account_login_created
def database_connection(variable):
resp_dict = json.loads(get_secret())
endpoint = resp_dict.get('host')
username = resp_dict.get('username')
password = resp_dict.get('password')
database_name = resp_dict.get('dbname')
port = resp_dict.get('port')
connection = pymysql.connect(host=endpoint, user=username, passwd=password, db=database_name, port=port)
cursor = connection.cursor()
if variable == "connection":
return connection
else:
return cursor
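One likely cause worth checking: every call to database_connection opens a brand-new connection, so the cursor that executed the INSERT and the connection you later commit belong to two different connections, and the INSERT is never committed. A minimal sketch of one way to keep them paired (the helper name is illustrative, and it assumes the same imports and get_secret helper as in the question):
def open_database_connection():
    # open a single connection and hand back both it and its cursor
    resp_dict = json.loads(get_secret())
    connection = pymysql.connect(host=resp_dict.get('host'),
                                 user=resp_dict.get('username'),
                                 passwd=resp_dict.get('password'),
                                 db=resp_dict.get('dbname'),
                                 port=resp_dict.get('port'))
    return connection, connection.cursor()

def create_groupone_account_login(groupone_account_id):
    connection, cursor = open_database_connection()
    time = datetime.utcnow().isoformat()[:-3] + 'Z'
    cursor.execute(
        "INSERT INTO account_login (groupone_account_id, login_date) VALUES (%s, %s)",
        (groupone_account_id, time))
    connection.commit()   # commit on the same connection the cursor came from
    cursor.close()
    connection.close()
    return True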
I would describe myself as a novice developer. I have written a Lambda function in Python which queries an RDS MySQL database. I wrote it on AWS and am now trying to test it locally, but I don't know where to start. I have tried using moto, but my limited knowledge is making things very difficult. Any tips or pointers?
import json
import boto3
import pymysql  # used below for the MySQL connection
from botocore.exceptions import ClientError
from rds_config import get_secret
resp_dict = json.loads(get_secret())
rds_client = boto3.client('rds-data')
# Connection Configuration Values
endpoint = resp_dict.get('host')
username = resp_dict.get('username')
password = resp_dict.get('password')
database_name = resp_dict.get('dbname')
port = resp_dict.get('port')
# Group Configuration Values
Group2 = "#email"
connection = pymysql.connect(host=endpoint, user=username, passwd=password, db=database_name, port=port)
cursor = connection.cursor()
def lambda_handler(event, context):
# Dump and load event data
data = json.dumps(event)
y = json.loads(data)
# Get the email and username
email = y['request']['userAttributes']['email']
cognitoUsername = y['userName']
# verify email and username are the same
if email != cognitoUsername:
raise Exception("Username and Email address must be the same")
try:
if checkGroup2(email):
if searchGroup2TableAccountID(email):
return event
else:
raise Exception("Group 2 user is not present in group 2 table")
else:
accountID = searchGroup1TableAccountID(email)
if accountID is not None:
status = searchOnboardingStatusTableOnboardingOutcome(accountID)
if (status is not None) and (status == 'accepted'):
return event
else:
raise Exception("User is not present in onboarding table")
else:
raise Exception("User is not present in Group1 table ")
except ClientError as e:
print(e.response['Error']['Message'])
def checkGroup2(email):
if "#email" in email:
return True
else:
return False
def searchGroup1TableAccountID(email):
sqlCheckEmail = "SELECT account_ID from group1 where email_address = %s"
cursor.execute(sqlCheckEmail, (email,))
accountID = cursor.fetchone()[0]
return accountID
def searchOnboardingStatusTableOnboardingOutcome(accountID):
sqlCheckStatus = "SELECT onboarding_outcome from onboarding_status where account_ID = %s"
cursor.execute(sqlCheckStatus, accountID)
status = cursor.fetchone()[0]
return status
def searchGroup2TableAccountID(email):
sqlCheckGroup2AccountID = "SELECT account_id from group2_account where user_email_address = %s"
cursor.execute(sqlCheckGroup2AccountID, (email,))
accountID = cursor.fetchone()
return accountID
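One way to start testing this locally is to call lambda_handler directly with a hand-built Cognito-style event and patch the module-level cursor. Because the boto3 client and the pymysql connection are created at import time, you may need to move those lines into a function (or point them at a reachable test database) before the import succeeds. A minimal sketch using unittest.mock, assuming the code above lives in a module named my_lambda (the module name, event shape, and values are illustrative):
from unittest.mock import patch

import my_lambda  # hypothetical module name for the Lambda code above

def test_group1_user_with_accepted_onboarding():
    event = {
        "userName": "alice@example.com",
        "request": {"userAttributes": {"email": "alice@example.com"}},
    }
    # fetchone is called twice: once for the account ID, once for the onboarding outcome
    with patch.object(my_lambda, "cursor") as fake_cursor:
        fake_cursor.fetchone.side_effect = [("acct-1",), ("accepted",)]
        assert my_lambda.lambda_handler(event, None) == event
Note that moto mocks AWS service APIs, but the pymysql connection is a plain MySQL connection rather than an AWS API call, so it still needs either a local database or a mock like the one above.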
I cannot get this script that I created to work. It needs to collect API data (the API returns JSON), and I want to save specific fields to MySQL. I played around with the code but didn't get it to work; I keep getting various "expected an indented block" errors.
from __future__ import print_function
import requests
import re
import MySQLdb
import json
data = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')
HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"
def store_data(articles, source, auther, title, description, url, timestamp, content):
db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")
cursor = db.cursor()
insert_query = MySQLdb.escape_string("INSERT INTO table (articles, source, auther, title, description, url, timestamp, content) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
db.commit()
cursor.close()
db.close()
return
# db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")# cursor = db.cursor()
def on_data(self, data): #This is the meat of the script...it connects to your mongoDB and stores the tweet
try:
datajson = json.loads(data) # grab the wanted data from the Tweet
articles = datajson['articles']
source = datajson['articles']['source']['name']
auther = datajson['articles']['auther']
title = datajson['articles']['title']
description = datajson['articles']['description']
url = datajson['articles']['url']
timestamp = parser.parse(datajson['articles']['publishedAt'])
content = datajson['articles']['content']
# insert the data into the MySQL database
store_data(articles, source, auther, title, description, url, timestamp, content)
except Exception as e:
print(e)
I expect the output to be stored in a MySQL table, but I get an error when I try to run the script.
Also, I need it to run endlessly until I kill the process/session.
from __future__ import print_function
import requests
import MySQLdb
from dateutil import parser
HOST = "localhost"
USER = "root"
PASSWD = "ssss!"
DATABASE = "sss"
def store_data(articles):
db=MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
cursor = db.cursor()
insert_query = MySQLdb.escape_string("INSERT INTO usa_news (articles) VALUES (%s)")
cursor.execute(insert_query, (articles,))
db.commit()
cursor.close()
db.close()
return
# api-endpoint
URL = "https://newsapi.org/v2/sources?apiKey=ssssssssss"
# API given here
country = "us"
# defining a params dict for the parameters to be sent to the API
PARAMS = {'country':country}
# sending get request and saving the response as response object
r = requests.get(url = URL, params= PARAMS)
# extracting data in json format
data = r.json()
# extracting latitude, longitude and formatted address
# of the first matching location
articles = data['sources'][0]['id']
# printing the output
print("article name:%s"
%(articles))
#insert the data into the MySQL database
store_data(articles)
Finally made it work!
Your indentation is all messed up, and Python relies on indentation. I didn't look at the code itself, so it might still be buggy, but here it is with the indentation fixed:
from __future__ import print_function
import requests
import re
import MySQLdb
import json
HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"
def store_data(articles, source, auther, title, description, url, timestamp, content):
db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")
cursor = db.cursor()
insert_query = MySQLdb.escape_string("INSERT INTO table (articles, source, auther, title, description, url, timestamp, content) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
db.commit()
cursor.close()
db.close()
return
# db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")# cursor = db.cursor()
def on_data(data): #This is the meat of the script...it connects to your mongoDB and stores the tweet
try:
datajson = json.loads(data) # grab the wanted data from the Tweet
articles = datajson['articles']
source = datajson['articles']['source']['name']
auther = datajson['articles']['auther']
title = datajson['articles']['title']
description = datajson['articles']['description']
url = datajson['articles']['url']
timestamp = parser.parse(datajson['articles']['publishedAt'])
content = datajson['articles']['content']
# insert the data into the MySQL database
store_data(articles, source, auther, title, description, url, timestamp, content)
except Exception as e:
print(e)
if __name__ == '__main__':
data = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')
on_data(data)
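A caveat on the block above: requests.get returns a Response object, so passing it straight to json.loads inside on_data will fail, and in the News API payload 'articles' is a list of article dicts rather than a single dict (and the field is spelled 'author', not 'auther'). A sketch of a main block that parses the response and stores each article, assuming the News API article schema and that dateutil is installed:
from dateutil import parser  # for the publishedAt timestamps

if __name__ == '__main__':
    response = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')
    payload = response.json()   # parse the JSON body instead of json.loads(response)
    for article in payload.get('articles', []):
        store_data(
            json.dumps(article),            # whole article as JSON text
            article['source']['name'],
            article.get('author'),
            article['title'],
            article['description'],
            article['url'],
            parser.parse(article['publishedAt']),
            article['content'],
        )
Also note that table is a reserved word in MySQL, so the INSERT statement in store_data needs a real table name.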
Updated to reflect changes suggested in comments
import requests
import MySQLdb
from dateutil import parser
HOST = "localhost"
USER = "root"
PASSWD = "xxxxx"
DATABASE = "xxxxx"
# api-endpoint
URL = "https://newsapi.org/v2/sources?apiKey=xxxxxxxxxxxxxxxxxxx"
# API given here
country = "us"
# defining a params dict for the parameters to be sent to the API
PARAMS = {'country':country}
# sending get request and saving the response as response object
r = requests.get(url = URL, params= PARAMS)
# extracting data in json format
data = r.json()
# extracting latitude, longitude and formatted address
# of the first matching location
articles = data['sources'][0]['id']
# printing the output
print("article name:%s"
%(articles))
def store_data(articles):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO xxxxx (articles) VALUES (%s)")
    cursor.execute(insert_query, (articles,))  # parameters must be passed as a tuple
    db.commit()
    cursor.close()
    db.close()
    return
#insert the data into the MySQL database
store_data(articles)
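If you also want the "run endlessly until I kill the process" behaviour from the question, a simple sketch is to wrap the request-and-store steps in a polling loop (the 60-second interval is an arbitrary choice):
import time

while True:
    r = requests.get(url=URL, params=PARAMS)
    data = r.json()
    articles = data['sources'][0]['id']
    store_data(articles)
    time.sleep(60)  # wait a minute before polling the API again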
A user entered the wrong password, and I can see badPwdCount in LDAP increment to 1 or 2.
However, after I run the Python script below to query the user's attributes, badPwdCount returns to 0. Any clue?
from ldap3 import Server, Connection
from pprint import pprint
server_uri = 'ldaps://193.178.115.11'
userid = 'user1'
password = 'user1password'
base = 'OU=Champ,DC=soobdev,DC=hq'
user_base = 'CN=%s,%s' % (userid, base)
search_filter = '(CN=%s)' % userid
attrs = ['*']
server = Server(server_uri, get_info='ALL')
with Connection(server, user=user_base, password=password, auto_bind=True ) as conn:
pprint('conn.result %s' % conn.result)
conn.search(base, search_filter, attributes=attrs)
pprint('conn.entries %s' % conn.entries)
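For what it's worth, one likely explanation: in Active Directory a successful authentication resets badPwdCount, and this script binds as the user with their correct password (auto_bind=True), which counts as a successful authentication. A sketch that reads the attribute while binding with a separate read-only account instead (the service-account DN and password are hypothetical):
from ldap3 import Server, Connection

server = Server('ldaps://193.178.115.11', get_info='ALL')
# bind as a dedicated read-only account, not as the user being inspected
with Connection(server,
                user='CN=svc_reader,OU=Champ,DC=soobdev,DC=hq',
                password='service-account-password',
                auto_bind=True) as conn:
    conn.search('OU=Champ,DC=soobdev,DC=hq',
                '(CN=user1)',
                attributes=['badPwdCount'])
    print(conn.entries)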