Run a .sql file with input from Python

I have a .sql file that takes an input parameter and runs. I need to run it from Python with some inputs, but it doesn't work. What is the problem?
The SQL file:
declare
--define variables
v_workspace_id NUMBER;
BEGIN
select workspace_id into v_workspace_id
from apex_workspaces
where workspace = upper('&1');
DBMS_OUTPUT.PUT_LINE(v_workspace_id);
apex_application_install.set_workspace_id( v_workspace_id );
apex_application_install.generate_application_id;
apex_application_install.generate_offset;
EXCEPTION
WHEN OTHERS
THEN
RAISE;
END;
The part of the Python file that runs it:
cmd_sql = 'echo quit | sqlplus -S ' + DB_USER + '/' + DB_USER_PWD + '@' + DB_HOST + ' @' + SQL_PATH + '\\' + 'install_apex_apps.sql ' + user_name + ' >> ' + LOG_FILE
os.system(cmd_sql)
user_name is passed as an input to the SQL file.

PL/SQL
Change the code to the following (note the trailing /, which tells SQL*Plus to execute the anonymous block). Also, as @Belayer suggested in the comment section, remove the EXCEPTION section.
SET SERVEROUTPUT ON;
declare
--define variables
v_workspace_id NUMBER;
BEGIN
select workspace_id into v_workspace_id
from apex_workspaces
where workspace = upper('&1');
DBMS_OUTPUT.PUT_LINE(v_workspace_id);
apex_application_install.set_workspace_id( v_workspace_id );
apex_application_install.generate_application_id;
apex_application_install.generate_offset;
END;
/
Python
Linux
import os
DB_USER = 'xxx'
DB_USER_PWD = 'xxx'
DB_HOST = 'xxx'
SQL_PATH = '/home/xxx/Documents/stack/'
LOG_FILE = '/home/xxx/Documents/stack/log.txt'
user_name = 'xxx'
# I overwrite the log file with >; for appending, use >>
cmd_sql = 'echo quit | sqlplus -S ' + DB_USER + '/' + DB_USER_PWD + '@' + DB_HOST + ' @' + SQL_PATH + 'install_apex_apps.sql ' + user_name + ' > ' + LOG_FILE
os.system(cmd_sql)
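A rough alternative sketch, not part of the original answer, reusing the same variables: subprocess feeds "quit" on stdin instead of echo, avoids building one long shell string, and lets you check the exit code.
import subprocess

with open(LOG_FILE, 'w') as log:
    result = subprocess.run(
        ['sqlplus', '-S', DB_USER + '/' + DB_USER_PWD + '@' + DB_HOST,
         '@' + SQL_PATH + 'install_apex_apps.sql', user_name],
        input='quit\n', stdout=log, stderr=subprocess.STDOUT,
        universal_newlines=True)
print(result.returncode)  # non-zero usually means sqlplus itself failed to run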

Related

Python - Add .gzip attachment to email

If I use my script, I always get this error:
IOError: [Errno 2] No such file or directory: "'/folder/my/20200114-013815/backup.sql.gz'"
Why can't the file be found? It's on the path.
Or do I have to add a gzip encoding or something to attachment.add_header? I don't know what's wrong; it's the first time I've tried to add an attachment in Python.
Thanks
DB_HOST = 'XXXXXXX'
DB_USER = 'XXXXXXX'
DB_USER_PASSWORD = 'XXXXXXX'
DB_NAME = 'XXXXXXX'
BACKUP_PATH = '/folder/my'
DATETIME = time.strftime('%Y%m%d-%H%M%S')
TODAYBACKUPPATH = BACKUP_PATH + '/' + DATETIME
try:
    os.stat(TODAYBACKUPPATH)
except:
    os.mkdir(TODAYBACKUPPATH)
if os.path.exists(DB_NAME):
    file1 = open(DB_NAME)
    multi = 1
else:
    multi = 0
if multi:
    in_file = open(DB_NAME, "r")
    flength = len(in_file.readlines())
    in_file.close()
    p = 1
    dbfile = open(DB_NAME, "r")
    while p <= flength:
        db = dbfile.readline()
        db = db[:-1]
        dumpcmd = "mysqldump -h " + DB_HOST + " -u " + DB_USER + " -p" + DB_USER_PASSWORD + " " + db + " > " + pipes.quote(TODAYBACKUPPATH) + "/" + db + ".sql"
        os.system(dumpcmd)
        gzipcmd = "gzip " + pipes.quote(TODAYBACKUPPATH) + "/" + db + ".sql"
        os.system(gzipcmd)
        p = p + 1
    dbfile.close()
else:
    db = DB_NAME
    dumpcmd = "mysqldump -h " + DB_HOST + " -u " + DB_USER + " -p" + DB_USER_PASSWORD + " " + db + " > " + pipes.quote(TODAYBACKUPPATH) + "/" + db + ".sql"
    os.system(dumpcmd)
    gzipcmd = "gzip " + pipes.quote(TODAYBACKUPPATH) + "/" + db + ".sql"
    os.system(gzipcmd)
msg = MIMEMultipart()
message = "Test"
password = "XXXXXXXX"
msg['From'] = "XXXXXXXX"
msg['To'] = "XXXXXXXX"
msg['Subject'] = "Test"
filename = "'" + TODAYBACKUPPATH + "/backup.sql.gz'"
f = file(filename)
msg.attach(MIMEText(message, 'plain'))
attachment = MIMEText(f.read())
attachment.add_header('Content-Disposition', 'attachment', filename=filename)
msg.attach(attachment)
server = smtplib.SMTP('XXXXXXXX: 587')
server.starttls()
server.login(msg['From'], password)
server.sendmail(msg['From'], msg['To'], msg.as_string())
server.quit()
Are you using the absolute path here? If not, try using it.
Your problem lies in your string concatenation, here:
filename = "'" + TODAYBACKUPPATH + "/backup.sql.gz'"
You need to remove the single quotes that you're adding - why are you adding them?
Change it to this:
filename = TODAYBACKUPPATH + "/backup.sql.gz"
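A minimal sketch of the corrected attachment handling, assuming the same msg and TODAYBACKUPPATH as above; it also swaps MIMEText for MIMEApplication, since the .gz file is binary rather than text.
import os
from email.mime.application import MIMEApplication

filename = TODAYBACKUPPATH + "/backup.sql.gz"
with open(filename, "rb") as f:              # binary mode, no extra quotes around the path
    attachment = MIMEApplication(f.read())
attachment.add_header('Content-Disposition', 'attachment',
                      filename=os.path.basename(filename))
msg.attach(attachment)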

Python Postgres query with transaction block over single connection

Currently I have two separate statements being passed to Postgres (Greenplum):
1. Truncate a table
2. Load data using \copy
myStr="export PGPASSWORD=" + dbPass + "; psql -h " + dbHost + " -p " + dbPort + " -d " + dbName + " -U " + dbUser + " -c " + "\"" + "truncate table " + dbTable + ";\""
print(myStr)
subprocess.call(myStr,shell=True)
myStr="export PGPASSWORD=" + dbPass + "; psql -h " + dbHost + " -p " + dbPort + " -d " + dbName + " -U " + dbUser + " -c " + "\"" + "\\" + "copy " + dbTable + " from " + "'" + csvfile + "' with " + copyOpts + ";" + "select count(*) from " + dbTable + ";\""
print(myStr)
subprocess.call(myStr,shell=True)
Sometimes the load has errors but the truncate has already happened, so I'm trying to run the two statements over one connection inside a transaction block (BEGIN ... COMMIT;), so that if the data load fails it rolls back to before the truncate.
I tried the below method:
myStr="export PGPASSWORD=" + dbPass + "; psql -h " + dbHost + " -p " + dbPort + " -d " + dbName + " -U " + dbUser + " -c " + "\"" + "truncate table " + dbTable + ";" + " \\" + "copy " + dbTable + " from " + "'" + csvfile + "' with " + copyOpts + ";" + "select count(*) from " + dbTable + ";\""
print(myStr)
Which resolves to the command:
export PGPASSWORD=abcde;
psql -h abcde.testserver.corp
-p 5432 -d namem -U username -c
"truncate table schema.example;
\copy schema.example from
'/home/testing/schema/schema.example_export.csv'
with header null as '' escape 'off' delimiter E',' ;
select count(*) from schema.example;"
However I am getting the error:
ERROR: syntax error at or near "\"
I believe this is because the backslash commands have to be on a separate line.
Is there a way to split the command into separate lines so I can execute all the commands in a single connection?
The problem is that you can't combine backslash commands with other commands when you use the -c option. You can send your commands to psql via STDIN using echo:
export PGPASSWORD=abcde;
echo "truncate table schema.example;
\copy schema.example from '/home/testing/schema/schema.example_export.csv' with header null as '' escape 'off' delimiter E',' ;
select count(*) from schema.example;" | psql -h abcde.testserver.corp -p 5432 -d namem -U username
That's a little bit clumsy. It's better to use subprocess.Popen, passing the command as an argument list and feeding the SQL on stdin as text:
theCommand = """truncate table schema.example;
\copy schema.example from
'/home/testing/schema/schema.example_export.csv'
with header null as '' escape 'off' delimiter E',' ;
select count(*) from schema.example;"""
theProcess = subprocess.Popen("psql -h abcde.testserver.corp -p 5432 -d namem -U username",
stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
theOutput, theErrors = theProcess.communicate(input = theCommand)
But the best approach is to avoid shell commands altogether and use a database adapter like PyGreSQL.
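As an illustration of that idea, here is a rough sketch with psycopg2 (a different adapter than the PyGreSQL one named above, and with the COPY options simplified): the truncate and the load run inside one transaction, so a failed load rolls the truncate back. Connection details are the placeholders from the question.
import psycopg2

conn = psycopg2.connect(host=dbHost, port=dbPort, dbname=dbName,
                        user=dbUser, password=dbPass)
try:
    with conn:                       # commits on success, rolls back on any error
        with conn.cursor() as cur:
            cur.execute("TRUNCATE TABLE " + dbTable)
            with open(csvfile) as f:
                cur.copy_expert(
                    "COPY " + dbTable + " FROM STDIN WITH CSV HEADER", f)
            cur.execute("SELECT count(*) FROM " + dbTable)
            print(cur.fetchone()[0])
finally:
    conn.close()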

Python: file path with spaces in a configuration file

I have a configuration file I'm reading with ConfigParser.
One of the values is a file path containing spaces in the path:
[Configuration]
mysqldumpexecutable_location = C:/Program Files (x86)/MySQL/MySQL Server 5.7/bin/
It seems that the value returned with get() for this parameter is only "C:/Program" - up to the space.
Here's the relevant code:
import os
import time
import datetime
import configparser
def BackupDB():
    try:
        config = configparser.RawConfigParser()
        config.read(r"etc\configuration.txt")
        DB_HOST = config.get('Configuration', 'db_host')
        DB_USER = config.get('Configuration', 'db_username')
        DB_USER_PASSWORD = config.get('Configuration', 'db_password')
        #DB_NAME = '/backup/dbnames.txt'
        DB_NAME = config.get('Configuration', 'db_schema')
        BACKUP_PATH = config.get('Configuration', 'backup_path')
        MYSQLDUMP_LOCATION = config.get('Configuration', 'mysqldumpexecutable_location')
        DATETIME = time.strftime('%d%m%Y-%H%M%S')
        TODAYBACKUPPATH = BACKUP_PATH + DATETIME
        db = DB_NAME
        dumpcmd = MYSQLDUMP_LOCATION + "mysqldump -u " + DB_USER + " -p" + DB_USER_PASSWORD + " " + db + " > " + TODAYBACKUPPATH + "/" + db + ".sql"
        os.system(dumpcmd)
    except Exception as e:
        print("something here")
        return None
I'm getting this error:
'c:/Program' is not recognized as an internal or external command,
operable program or batch file.
How can I pass this Windows path correctly?
Thanks!
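A hedged sketch of one possible fix, reusing the names from the snippet above and assuming the config value itself is read in full: os.system() hands the unquoted path to the shell, which splits it at the first space, so passing the command to subprocess as an argument list sidesteps the quoting problem entirely.
import subprocess

mysqldump = MYSQLDUMP_LOCATION + "mysqldump"      # full path, spaces and all
outfile = TODAYBACKUPPATH + "/" + db + ".sql"
with open(outfile, "w") as out:
    # Each argument is passed as-is; the shell never gets a chance to split the path.
    subprocess.run([mysqldump, "-u", DB_USER, "-p" + DB_USER_PASSWORD, db],
                   stdout=out)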

Change sshpass to a more secure solution

I have a python script which contains the following function:
def upload2server(file):
    host_name = 'example.ex.am.com'
    port_num = '432'
    user_name = 'user'
    password = 'passw'
    web_path = '/example/files/'
    full_webpath = user_name + '@' + host_name + ':' + web_path + args.key
    pre_command = 'sshpass -p "' + password + '" scp -P' + ' ' + port_num + ' '
    scp_comm = pre_command + file + ' ' + full_webpath
    os.system(scp_comm)
I have 2 questions:
How insecure is it if I run this script from a remote network using port forwarding?
In which ways could I make this upload more secure?
Thanks!
Personally, I would generate an SSH keypair for each host; then you can forget about using the password in your scp command entirely. Having your password inline isn't a problem per se, but it does mean that your password will get recorded in the ~/.bash_history file of that user.
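A rough sketch of what upload2server() could look like once a keypair is in place; the identity file path is a hypothetical placeholder, everything else is reused from the question.
import subprocess

def upload2server(file):
    host_name = 'example.ex.am.com'
    port_num = '432'
    user_name = 'user'
    web_path = '/example/files/'
    identity_file = '/home/user/.ssh/id_ed25519'   # hypothetical key location
    full_webpath = user_name + '@' + host_name + ':' + web_path + args.key
    # No password on the command line and nothing sensitive in the shell history.
    subprocess.run(['scp', '-i', identity_file, '-P', port_num,
                    file, full_webpath])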

MySQL data to be saved as CSV/Excel using Flask (Python) when a button is clicked

I'm new to exporting data. I've researched all over the net, but it was really hard for me to understand; can someone help me learn the basics?
This is my main problem: I want to download specific data from MySQL based on the date range I choose in my client. When I click the download button, I want this data from MySQL to be saved on my computer, with the option to save it as CSV/Excel. I'm using Python for my web service. Thank you.
This is my code right now in my web service:
@api.route('/export_file/', methods=['GET', 'POST'])
def export_file():
    if request.method == 'POST':
        selectAttendance = """SELECT * FROM attendance"""
        db.session.execute(selectAttendance)
        db.session.commit()
        f = csv.writer(open("file.csv", "w"))
        for row in selectAttendance:
            f.writerow([str(row)])
    return jsonify({'success': True})
In general:
Set the "Content-Type" part of the HTTP header to the MIME type matching your data. This tells the browser what type of data the web server is going to send.
Send the actual data in the body.
With Flask, see the links below; a minimal sketch follows them.
Forcing application/json MIME type in a view (Flask)
http://flask.pocoo.org/docs/0.10/patterns/streaming/
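A minimal sketch of that idea, assuming the same api route and db.session as in the question (the filename is illustrative): write the rows into an in-memory CSV and return it with a text/csv Content-Type and an attachment Content-Disposition so the browser offers to save it.
import csv
import io
from flask import Response

@api.route('/export_file/', methods=['GET', 'POST'])
def export_file():
    rows = db.session.execute("SELECT * FROM attendance")
    buf = io.StringIO()
    writer = csv.writer(buf)
    for row in rows:                      # each result row is tuple-like
        writer.writerow(list(row))
    return Response(buf.getvalue(),
                    mimetype='text/csv',
                    headers={'Content-Disposition':
                             'attachment; filename=attendance.csv'})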
def get(self):
    try:
        os.stat(BACKUP_PATH)
    except:
        os.mkdir(BACKUP_PATH)
    now = datetime.now()  # current date and time
    year = now.strftime("%Y")
    month = now.strftime("%m")
    day = now.strftime("%d")
    time = now.strftime("%H:%M:%S")
    date_time = now.strftime("%d_%m_%Y_%H:%M:%S")
    TODAYBACKUPPATH = BACKUP_PATH + '/' + date_time
    try:
        os.stat(TODAYBACKUPPATH)
    except:
        os.mkdir(TODAYBACKUPPATH)
    print("checking for databases names file.")
    if os.path.exists(DB_NAME):
        file1 = open(DB_NAME)
        multi = 1
        print("Databases file found...")
        print("Starting backup of all dbs listed in file " + DB_NAME)
    else:
        print("Databases file not found...")
        print("Starting backup of database " + DB_NAME)
        multi = 0
    if multi:
        in_file = open(DB_NAME, "r")
        flength = len(in_file.readlines())
        in_file.close()
        p = 1
        dbfile = open(DB_NAME, "r")
        while p <= flength:
            db = dbfile.readline()  # reading database name from file
            db = db[:-1]  # deletes extra line
            dumpcmd = "mysqldump -h " + DB_HOST + " -u " + DB_USER + " -p" + DB_USER_PASSWORD + " " + db + " > " + pipes.quote(TODAYBACKUPPATH) + "/" + db + ".sql"
            os.system(dumpcmd)
            gzipcmd = "gzip " + pipes.quote(TODAYBACKUPPATH) + "/" + db + ".sql"
            os.system(gzipcmd)
            p = p + 1
        dbfile.close()
    else:
        db = DB_NAME
        dumpcmd = "mysqldump -h " + DB_HOST + " -u " + DB_USER + " -p" + DB_USER_PASSWORD + " " + db + " > " + pipes.quote(TODAYBACKUPPATH) + "/" + db + ".sql"
        os.system(dumpcmd)
        gzipcmd = "gzip " + pipes.quote(TODAYBACKUPPATH) + "/" + db + ".sql"
        os.system(gzipcmd)
    # t = ("Your backups have been created in '" + TODAYBACKUPPATH + "' directory")
    return "Your Folder have been created in '" + TODAYBACKUPPATH + "'."
