Sending email but eliminating 'L' in the string from Python

My output comes out as: total records in staging table are (3L,)
I only want the number in brackets. How can I remove the L?
I'm converting the query result to str because I am not able to concatenate part1 and msg otherwise.
#!/usr/bin/env python
import csv
import MySQLdb
import pysftp
import smtplib
import sys

# connection to database
conn = MySQLdb.connect(host="XXXXX",    # the host
                       user="XXXXX",    # username
                       passwd="XXXXX",  # password
                       db="XXXXX")      # name of the database

part1 = """From: XXXXX
To: To Person <XXXXX.com>
MIME-Version: 1.0
Content-type: text/html
Subject: SMTP HTML e-mail test

Total records in Staging Table inserted:
"""
sender = 'XXXXX'
receivers = ['XXXXX', 'XXXXX']

x = conn.cursor()
query = "select count(*) as total_records_inserted from fpwbs_feed_stg"
x.execute(query)
msg = ""
for row in x.fetchall():
    msg += str(row)
msg = part1 + msg

smtpObj = smtplib.SMTP('XXXXX', 25)
smtpObj.sendmail(sender, receivers, msg)

Do this to get the first item in the tuple:
for row in x.fetchall():
    msg += str(row[0])
The trailing L is Python 2's marker for a long integer: str() of the whole tuple calls repr() on each element, which keeps the suffix, while str() of the number itself drops it.
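A minimal sketch of the difference, for the record (the 3L literal is Python 2 only):
row = (3L,)
print str(row)       # prints: (3L,)
print str(row[0])    # prints: 3
print "%d" % row[0]  # string formatting also avoids the suffix: 3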

Related

How to parse variable values in stored proc from dataframe using python

I have written code to pick specific values from an email body and store them in a dataframe. The next step is to store those values in an Oracle database. For that I am using SQLAlchemy, but I am not sure how I can pass those values to the stored proc, like below:
call CORE_VALUATIONS.VALUATIONS.INSERTEQCLOSINGPRICE("SGEPSBSH",to_date('"&TEXT(2022-06-01,"DDMMMYYYY")&"','ddmonyyyy'),"111.9852",NULL,NULL);
from sqlalchemy.engine import create_engine
import datetime

today = datetime.date.today()
DIALECT = 'oracle'
SQL_DRIVER = 'cx_oracle'
USERNAME = 'robinhood'  # enter your username
PASSWORD = 'XXXXXX'  # enter your password
HOST = 'pv-prod-orc-01.XXXXX.com'  # enter the oracle db host url
PORT = 1521  # enter the oracle port number
SERVICE = 'XXXX_APP.ec2.internal'  # enter the oracle db service name
ENGINE_PATH_WIN_AUTH = DIALECT + '+' + SQL_DRIVER + '://' + USERNAME + ':' + PASSWORD + '@' + HOST + ':' + str(PORT) + '/?service_name=' + SERVICE
engine = create_engine(ENGINE_PATH_WIN_AUTH)
# test query
query = """
call CORE_VALUATIONS.VALUATIONS.INSERTEQCLOSINGPRICE("SGEPSBSH",to_date('"&TEXT(2022-06-01,"DDMMMYYYY")&"','ddmonyyyy'),"111.9852",NULL,NULL);
"""
con = engine.connect()
outpt = con.execute(query)
con.close()
How can I pass those values to the stored proc?
Call .execute(sql_text, dict_of_param_values), e.g., something like
import sqlalchemy as sa
# …
sql_text = sa.text(
    "call CORE_VALUATIONS.VALUATIONS.INSERTEQCLOSINGPRICE(:param1, :param2, :param3, NULL, NULL);"
)
dict_of_param_values = dict(param1="SGEPSBSH", param2="2022-06-01", param3=111.9852)
with engine.begin() as conn:
    conn.execute(sql_text, dict_of_param_values)
    # (transaction will automatically commit when `with` block exits)
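An alternative, hedged sketch: since the second argument is an Oracle DATE, you can skip SQL text entirely and call the procedure through the raw cx_Oracle connection, binding a Python date object directly (cx_Oracle converts date objects to DATE, so no to_date() is needed). This reuses the engine from above:
import datetime

raw = engine.raw_connection()
try:
    cur = raw.cursor()
    # cx_Oracle's callproc passes positional parameters straight to the stored procedure
    cur.callproc(
        "CORE_VALUATIONS.VALUATIONS.INSERTEQCLOSINGPRICE",
        ["SGEPSBSH", datetime.date(2022, 6, 1), 111.9852, None, None],
    )
    raw.commit()
finally:
    raw.close()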

Python script to query data for last 1 hour

Using the below script, I am trying to fetch documents added in the last hour for multiple collections, but it's giving me a zero value.
Can someone look at the code below and help me fix it?
import pymongo
import sys
from datetime import datetime
from datetime import timedelta
from pymongo import MongoClient

# establish connectivity to MongoDB via ssl using the pymongo module
#args = sys.argv
host = 'mongo-db-prd'
uname = 'superuser'
passwrd = 'Hayyo'
#print (args)
port = "27017"
print(uname)
print(passwrd)
uri = 'mongodb://' + uname + ":" + passwrd + "@" + host + ":" + port + '/?authSource=admin'
client = MongoClient(uri, ssl=True, ssl_ca_certs='./files/rds-combined-ca-bundle.pem')
print("connected client")
db = client.feeds  # creates the hl7feeds DB lazily on first write
print(client.list_database_names())  # print the list of DBs
mycol = db["feeds_100"]  # creates a collection in the DB
docins = mycol.insert_one({"name": "test"})  # insert a document so the DB materializes
dblist = client.list_database_names()
print(client.list_database_names())
# let's create collections on docdb for all tenants
tlist1 = ["feeds_104", "feeds_105", "feeds_106"]
for each_val in tlist1:
    print(each_val)
    countvalue = db.getCollection('each_val').find({"row_created_date": {"$gt": datetime.utcnow() - timedelta(hours=1)}}).count()
    print(countvalue)
If I index a collection directly with the db[collection_name] method, I can get results.
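The loop as written queries the literal string 'each_val' through getCollection, which is mongo-shell syntax rather than PyMongo. A minimal sketch of the corrected loop, assuming row_created_date is stored as a BSON date and reusing db from above:
from datetime import datetime, timedelta

cutoff = datetime.utcnow() - timedelta(hours=1)
for name in ["feeds_104", "feeds_105", "feeds_106"]:
    # db[name] resolves the collection by the loop variable's value
    count = db[name].count_documents({"row_created_date": {"$gt": cutoff}})
    print(name, count)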

Trying to collect API data and send to local mysql

I cannot get this script that I created to work.
It needs to collect API data (the API returns JSON), and I want to save specific fields to MySQL.
I played around with the code and didn't get it to work; I keep getting various "expected an indented block" errors.
from __future__ import print_function
import requests
import re
import MySQLdb
import json

data = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')
HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"

def store_data(articles, source, auther, title, description, url, timestamp, content):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO table (articles, source, auther, title, description, url, timestamp, content) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
    cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
    db.commit()
    cursor.close()
    db.close()
    return

# db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")
# cursor = db.cursor()

def on_data(self, data):  # this is the meat of the script... it parses the response and stores the article
    try:
        datajson = json.loads(data)  # grab the wanted data from the response
        articles = datajson['articles']
        source = datajson['articles']['source']['name']
        auther = datajson['articles']['auther']
        title = datajson['articles']['title']
        description = datajson['articles']['description']
        url = datajson['articles']['url']
        timestamp = parser.parse(datajson['articles']['publishedAt'])
        content = datajson['articles']['content']
        # insert the data into the MySQL database
        store_data(articles, source, auther, title, description, url, timestamp, content)
    except Exception as e:
        print(e)
I expect the output to be stored in a MySQL table, but I get an error while trying to run the script.
Also, I need to make it run endlessly until I kill the process/session.
from __future__ import print_function
import requests
import MySQLdb
from dateutil import parser

HOST = "localhost"
USER = "root"
PASSWD = "ssss!"
DATABASE = "sss"

def store_data(articles):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO usa_news (articles) VALUES (%s)")
    cursor.execute(insert_query, (articles,))
    db.commit()
    cursor.close()
    db.close()
    return

# api endpoint
URL = "https://newsapi.org/v2/sources?apiKey=ssssssssss"
# country filter for the API
country = "us"
# params dict for the parameters to be sent to the API
PARAMS = {'country': country}
# send the GET request and save the response as a response object
r = requests.get(url=URL, params=PARAMS)
# extract the data in json format
data = r.json()
# extract the id of the first source
articles = data['sources'][0]['id']
# print the output
print("article name:%s" % (articles))
# insert the data into the MySQL database
store_data(articles)
Finally made it work!
Your indents are all messed up, and Python relies on indentation. I didn't look at the logic itself, so it might still be bugged, but here it is with the indents fixed:
from __future__ import print_function
import requests
import re
import MySQLdb
import json
from dateutil import parser  # needed for parser.parse() below

HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"

def store_data(articles, source, auther, title, description, url, timestamp, content):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    # NOTE: "table" is a reserved word in MySQL; put the real table name here
    insert_query = MySQLdb.escape_string("INSERT INTO table (articles, source, auther, title, description, url, timestamp, content) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
    cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
    db.commit()
    cursor.close()
    db.close()
    return

# db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")
# cursor = db.cursor()

def on_data(data):  # this is the meat of the script... it parses the response and stores the article
    try:
        datajson = json.loads(data)  # grab the wanted data from the response
        articles = datajson['articles']
        source = datajson['articles']['source']['name']
        auther = datajson['articles']['auther']
        title = datajson['articles']['title']
        description = datajson['articles']['description']
        url = datajson['articles']['url']
        timestamp = parser.parse(datajson['articles']['publishedAt'])
        content = datajson['articles']['content']
        # insert the data into the MySQL database
        store_data(articles, source, auther, title, description, url, timestamp, content)
    except Exception as e:
        print(e)

if __name__ == '__main__':
    data = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')
    on_data(data.text)  # json.loads expects the raw JSON string, not the Response object
Updated to reflect changes suggested in comments
import requests
import MySQLdb
from dateutil import parser

HOST = "localhost"
USER = "root"
PASSWD = "xxxxx"
DATABASE = "xxxxx"

# api endpoint
URL = "https://newsapi.org/v2/sources?apiKey=xxxxxxxxxxxxxxxxxxx"
# country filter for the API
country = "us"
# params dict for the parameters to be sent to the API
PARAMS = {'country': country}
# send the GET request and save the response as a response object
r = requests.get(url=URL, params=PARAMS)
# extract the data in json format
data = r.json()
# extract the id of the first source
articles = data['sources'][0]['id']
# print the output
print("article name:%s" % (articles))

def store_data(articles):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO xxxxx (articles) VALUES (%s)")
    cursor.execute(insert_query, (articles,))  # the parameters must be a sequence, hence the trailing comma
    db.commit()
    cursor.close()
    db.close()
    return

# insert the data into the MySQL database
store_data(articles)
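The question also asked for the script to run endlessly until the process is killed; neither version above covers that. One hedged sketch is to wrap the fetch-and-store steps in a polling loop, reusing URL, PARAMS, and store_data from above (the interval is an arbitrary choice):
import time

def poll_forever(interval_seconds=300):
    # repeat until the process/session is killed (e.g. Ctrl+C)
    while True:
        r = requests.get(url=URL, params=PARAMS)
        articles = r.json()['sources'][0]['id']
        store_data(articles)
        time.sleep(interval_seconds)

poll_forever()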

Transfer outlook emails to MS SQL

I'm setting up a program that extracts the date, sender, subject and body of emails in my Outlook. However, when I run the code it gives me the error below:
Traceback (most recent call last):
File "C:\Users\zaballgl\Documents\Adhoc\2019\April\ETL_MetricsEmailOutlook.py", line 83, in <module>
cursor.execute("INSERT INTO dbo.BSO_metricsEmailReports([Start_Date],[Name],[Subject],[Body])values(?,?,?,?)",row['Start_Date'],row['Name'],row['Subject'],row['Body'])
pyodbc.ProgrammingError: ('Invalid parameter type. param-index=1 param-type=CDispatch', 'HY105')
This is my code for extracting data from my outlook:
import win32com.client
import pandas as pd
import datetime
import numpy as np
import pyodbc

outlook = win32com.client.Dispatch("Outlook.Application").GetNamespace("MAPI")
inbox = outlook.Folders('email@outlook.com').Folders('Inbox')
messages = inbox.Items
message = messages.GetFirst()
rec_time = message.CreationTime
body_content = message.body
subj_line = message.subject
sender = message.Sender

year = []
month = []
day = []
hour = []
minute = []
subject = []
sender = []
body = []

while message:
    ### This takes every part of message.CreationTime and appends it to the lists above
    year.append(message.CreationTime.year)
    month.append(message.CreationTime.month)
    day.append(message.CreationTime.day)
    hour.append(message.CreationTime.hour)
    minute.append(message.CreationTime.minute)
    ## Append every subject to the subject list
    subject.append(message.subject)
    ## Append every sender name to the sender list
    sender.append(message.Sender)
    ## Append every body to the body list
    body.append(message.body)
    ## Go to the next email
    message = messages.GetNext()

# ------ COLUMNS FOR THE TABLE --------- #
# StartDate
date = pd.DataFrame({'year': year, 'month': month, 'day': day, 'hour': hour, 'minute': minute})  # Had to do this to bypass: ValueError: Tz-aware datetime.datetime cannot be converted to datetime64 unless utc=True
startDate = pd.to_datetime(date)  # using the above, this converts to dtype: datetime64[ns]
# Subject
subject = pd.Series(subject)  # just a series of subject data
# Sender
sender = pd.Series(sender)  # just a series of sender data
# Body
body = pd.Series(body)  # just a series of body data
df2 = pd.DataFrame({'Start_Date': startDate, 'Name': sender, 'Subject': subject, 'Body': body})
And this is my code to transfer them to my MS SQL:
connStr = pyodbc.connect('DRIVER={ODBC Driver 13 for SQL Server}; Server=someservername;DATABASE=somedatabase;UID=someID;PWD=somepassword#')
cursor = connStr.cursor()
deleteTable = "DELETE FROM dbo.BSO_metricsEmailReports"
cursor.execute(deleteTable)
for index, row in df2.iterrows():
    cursor.execute("INSERT INTO dbo.BSO_metricsEmailReports([Start_Date],[Name],[Subject],[Body]) VALUES (?,?,?,?)", row['Start_Date'], row['Name'], row['Subject'], row['Body'])
connStr.commit()
cursor.close()
connStr.close()
They would be sent to a table in my MS SQL 2014 database with the design below:
Column Name | Data Type
Start_Date  | datetime
Name        | nchar(300)
Subject     | nchar(300)
Body        | nchar(300)
I'd check the execute line...
I use a ? for each parameter in the query and set it up like this:
sql_rollup = '''
SELECT ID, FIRSTNAME, LASTNAME, USERNAME, numSessions
FROM SESSION INNER JOIN
     PERSONNEL ON SESSION.ID = PERSONNEL.ID
WHERE (SessionStartDT BETWEEN ? AND ?) AND (SiteID = ?)
'''
Then I execute the above like this:
con = pyodbc.connect(
    Trusted_connection='Yes',
    Driver='{SQL Server}',
    Server=myConfig["database"]["hostname"] + ',' + myConfig["database"]["port"],
    Database=myConfig["database"]["database"]
)
con.autocommit = True
cur = con.cursor()
parms = (str(dateFirst), str(dateLast), siteID)
cur.execute(sql_rollup, parms)
Note that parms is set up as a tuple, and (I suspect this is where the problem lies) I convert the datetime values dateFirst and dateLast to strings.
pyodbc can't bind arbitrary Python objects; the database, on the other hand, can parse strings and convert them into date/time values itself.
Does this help?
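Applied to the question's loop: param-index=1 is the Name column, and message.Sender is a COM (CDispatch) object, so coercing each value to a plain Python type before the insert should clear the HY105 error. A hedged sketch, reusing df2, cursor, and connStr from the question and assuming str() on the Sender object yields the display name:
for index, row in df2.iterrows():
    cursor.execute(
        "INSERT INTO dbo.BSO_metricsEmailReports([Start_Date],[Name],[Subject],[Body]) VALUES (?,?,?,?)",
        row['Start_Date'].to_pydatetime(),  # pandas Timestamp -> datetime.datetime
        str(row['Name']),                   # COM CDispatch -> str
        str(row['Subject']),
        str(row['Body']),
    )
connStr.commit()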

Python 3.5 insert sql stored variable

I am working on my first Python script to check email, parse out some data, and then insert it into a Microsoft SQL database. I am having problems with how to insert the saved variable t.
import imaplib
import email
import re
import _mssql

m = imaplib.IMAP4_SSL("imap.gmail.com", 993)
m.login("username", "password$")
m.select('"Fire_Dispatch"')
result, data = m.search(None, "ALL")  # search all email and return uids
if result == 'OK':
    for num in data[0].split():
        result, data = m.fetch(num, "(BODY[TEXT])")
        if result == 'OK':
            email_message = email.message_from_string(data[0][1].decode('utf-8'))  # raw email text including headers
            p = str(email_message)
            a = re.search("ALLCFD(.*)", p)
            t = (a.groups(1))
            print(t)
            conn = _mssql.connect(server='server', user='username', password='password', database='CFD_Calls')
            conn.execute_non_query("INSERT INTO calls (call_date_time,call)VALUES('3',t)")
m.close()
m.logout()
You'll need to provide the t variable to the SQL execution; note that _mssql uses %s-style placeholders rather than ?. One way to do this:
conn.execute_non_query("INSERT INTO calls (call_date_time, call) VALUES ('3', %s)", (t,))
I'm assuming t will be a string. Note that when provided to the SQL command, the variable is enclosed in a tuple; this is because the command expects the argument to be a sequence.
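One caveat: a.groups(1) returns a tuple, not a string, so t as captured in the question is a 1-tuple. A hedged sketch of pulling out the match text first, reusing p and conn from the question:
a = re.search("ALLCFD(.*)", p)
if a:
    t = a.group(1)  # the text captured by (.*), a plain str
    conn.execute_non_query(
        "INSERT INTO calls (call_date_time, call) VALUES ('3', %s)",
        (t,),
    )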
