I'm trying to run terminal commands from a web Python script.
I've tried many things but none of them seem to work: adding 'www-data' to sudoers, using the full path to the binary, prefixing the command with sudo, and using three different system calls (os.spawnl and subprocess). None of that works.
Read-only commands like "ps aux" that only output information work, but a simple echo to a file doesn't. It seems like I need permissions to do so. What more can I try?
Example from the output: Unexpected error: (<class 'subprocess.CalledProcessError'>, CalledProcessError(2, '/bin/echo hello > /var/www/html/cgi-bin/test2.htm'), <traceback object at 0x...>)
In that example, the /var/www/html/cgi-bin/ folder is owned by "www-data", the same user as in the server config.
#!/usr/bin/python3
# coding=utf-8

import os
import sys
import subprocess
import cgi

SCRIPT_PATH = "/var/www/html/scripts/aqi3.py"
DATA_FILE = "/var/www/html/assets/aqi.json"
KILL_PROCESS = "ps aux | grep " + SCRIPT_PATH + " | grep -v \"grep\" | awk '{print $2}' | xargs kill -9"
START_PROCESS = "/usr/bin/python3 " + SCRIPT_PATH + " start > /dev/null 2>&1 &"
STOP_PROCESS = "/usr/bin/python3 " + SCRIPT_PATH + " stop > /dev/null 2>&1 &"

# Don't edit
def killProcess():
    os.spawnl(os.P_NOWAIT, KILL_PROCESS)
    try:
        os.spawnl(os.P_NOWAIT, "/bin/echo hello > /var/www/html/cgi-bin/test2.htm")
        proc = subprocess.Popen(['sudo', 'echo', 'hello > /var/www/html/cgi-bin/test3.htm'])
        print(subprocess.check_output("/bin/echo hello > /var/www/html/cgi-bin/test2.htm", shell=True, timeout=10))
    except:
        print("Unexpected error:", sys.exc_info())
    print(KILL_PROCESS)

def stopSensor():
    killProcess()
    os.spawnl(os.P_NOWAIT, STOP_PROCESS)

def restartProcess():
    killProcess()
    print(START_PROCESS)
    print(os.spawnl(os.P_NOWAIT, START_PROCESS))

def main():
    arguments = cgi.FieldStorage()
    for key in arguments.keys():
        value = arguments[key].value
        if key == 'action':
            if value == 'stop':
                stopSensor()
                print("ok")
                return
            elif value == 'start' or value == 'restart':
                restartProcess()
                print("ok")
                return
            elif value == 'resetdata':
                try:
                    with open(DATA_FILE, 'w') as outfile:
                        outfile.write('[]')
                except:
                    print("Unexpected error:", sys.exc_info())
                print("ok")
                return
    print("?")

main()
I was able to solve my problem with: http://alexanderhoughton.co.uk/blog/lighttpd-changing-default-user-raspberry-pi/
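For reference, a likely contributing factor in the script above: os.spawnl expects an executable path plus individual arguments and never starts a shell, so strings containing redirections ('> file') or pipelines (the KILL_PROCESS string) are not interpreted, and ['sudo', 'echo', 'hello > ...'] hands the redirection to echo as a literal argument. A minimal sketch of shell-free alternatives, assuming the CGI user really does have write access to the paths used above:

#!/usr/bin/python3
import subprocess

# Simplest: write the file directly from Python, no external command needed.
with open("/var/www/html/cgi-bin/test2.htm", "w") as fh:
    fh.write("hello\n")

# If an external command has to produce the output, redirect its stdout
# from Python instead of relying on shell syntax.
with open("/var/www/html/cgi-bin/test3.htm", "w") as fh:
    subprocess.run(["/bin/echo", "hello"], stdout=fh, check=True)

# Shell features such as pipes, '>' and '&' are only interpreted when
# shell=True is passed, e.g. subprocess.run(KILL_PROCESS, shell=True).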
Related
I am new to Python scripting. We are trying to kill multiple Java processes using a Python script. Below is the script.
#!/usr/bin/env python3
import os, signal

def process():
    name = ['test1.jar', 'test2.jar', 'test3.jar']
    try:
        for line in os.popen("ps ax | grep " + name + " | grep -v grep"):
            fields = line.split()
            pid = fields[0]
            os.kill(int(pid), signal.SIGKILL)
        print("Process Successfully terminated")
    except:
        print("Error Encountered while running script")

process()
We are not able to kill the process and end up with an "Error Encountered while running script" message.
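For what it's worth, the bare except hides the real error: "ps ax | grep " + name concatenates a string with a list, which raises a TypeError before ps is ever run. A minimal sketch of one possible fix, assuming the intent is to kill every process whose command line mentions one of the jar names, looping over them one at a time:

#!/usr/bin/env python3
import os, signal

def process():
    names = ['test1.jar', 'test2.jar', 'test3.jar']
    for name in names:
        # grep for one jar name at a time; name is now a string, not a list
        for line in os.popen("ps ax | grep " + name + " | grep -v grep"):
            pid = int(line.split()[0])
            os.kill(pid, signal.SIGKILL)
            print("Terminated", name, "with PID", pid)

process()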
After some trial and error I came up with the following solution (popen.py):
#! /usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import pty
import sys
from subprocess import Popen
from shlex import join
import errno

master_fd, slave_fd = pty.openpty()
cmd = join(sys.argv[1:])
print(">>", cmd)

try:
    p = Popen(
        cmd,
        shell=True,
        stdout=slave_fd,
        stderr=slave_fd,
        stdin=slave_fd,
        close_fds=True,
        universal_newlines=True,
    )
    os.close(slave_fd)
    while p.returncode is None:
        buffer = os.read(master_fd, 512)
        if buffer:
            os.write(1, buffer)
        else:
            break
except OSError as err:
    if err.errno != errno.EIO:
        raise
except KeyboardInterrupt:
    print(f"\n## Err: Terminating the PID: {p.pid}")
    p.terminate()
This works well in most cases:
> ./popen.py date
>> date
Wed 13 May 19:10:54 BST 2020
> ./popen.py date +'%F_%T'
>> date +%F_%T
2020-05-13_19:10:56
> ./popen.py bash -c 'while :; do echo -n .; sleep .5; done;'
>> bash -c 'while :; do echo -n .; sleep .5; done;'
.......^C
## Err: Terminating the PID: 840102
However, it seems that my script is not capable of reading stdin:
> ./popen.py bash -c 'read -p "Enter something: " x; echo $x'
>> bash -c 'read -p "Enter something: " x; echo $x'
Enter something: bla bla
Come on... read it!!!
^C
## Err: Terminating the PID: 841583
> ./popen.py docker exec -it 9ab85463e3c1 sh
>> docker exec -it 9ab85463e3c1 sh
/opt/work_dir # ^[[20;19R
sfsdf
sdfsdf
^C
## Err: Terminating the PID: 847172
I've also tried skipping the os.close(slave_fd) step, but with exactly the same results :-/
My goal is to replicate a bash script similar to the following (bash.sh):
#! /bin/bash --

ACT="${1:?## Err: Sorry, what should I do?}"
DID="${2:?## Err: Oh please.. Where\'s ID?}"
CMD=( docker exec -it )

case "${ACT}" in
    (run)
        shift 2
        echo ">> ${CMD[@]}" "${DID}" "$@"
        "${CMD[@]}" "${DID}" "$@"
        ;;
    (*)
        echo "## Err: Something went wrong..."
        exit 1
        ;;
esac
Example:
> ./bash.sh run 9ab85463e3c1 sh
>> docker exec -it 9ab85463e3c1 sh
/opt/work_dir # date
Wed May 13 19:08:05 UTC 2020
/opt/work_dir # ^C
> ./bash.sh run 9ab85463e3c1 date +"%F_%T"
>> docker exec -it 9ab85463e3c1 date +%F_%T
2020-05-13_19:35:09
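For completeness, the likely reason the popen.py attempt above could not read stdin: the loop only copies data from master_fd to stdout and never forwards anything typed on the parent's stdin back to master_fd, so the child never receives any input. A rough sketch of the missing direction (illustration only, assuming a POSIX system), using select to pump both ways:

import os
import select

def pump(master_fd):
    """Copy child output (master_fd) to our stdout and our stdin to the
    child, until either side is closed."""
    while True:
        rlist, _, _ = select.select([master_fd, 0], [], [])
        if master_fd in rlist:
            try:
                data = os.read(master_fd, 512)
            except OSError:            # EIO on Linux once the child exits
                break
            if not data:
                break
            os.write(1, data)          # child output -> our stdout
        if 0 in rlist:
            data = os.read(0, 512)
            if not data:               # EOF on our stdin
                break
            os.write(master_fd, data)  # our keystrokes -> child

For fully interactive programs the parent terminal would also need to be switched to raw mode, which is essentially what pty.spawn does in the wrapper script below.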
For my use case I eventually came up with the following script (wrapper.py):
#! /usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import pty
import fire
from shlex import split
from os.path import realpath, expanduser

def proc(cmd, cd=None):
    """Spawn a process, and connect its controlling terminal with the
    current process's standard io."""
    print("## PID: {}".format(os.getpid()))

    def m_read(fd):
        """Master read function."""
        return os.read(fd, 1024)

    def i_read(fd):
        """Standard input read function."""
        return os.read(fd, 1024)

    cmd = cmd if isinstance(cmd, (list, tuple)) else split(cmd)
    try:
        cwd = os.getcwd()
        if cd is not None:
            print("## Changing directory: {}".format(realpath(cd)))
            os.chdir(expanduser(os.fsdecode(cd)))
        ex_msg = "\n## Exit Status Indicator: {}"
        print(ex_msg.format(pty.spawn(cmd, m_read, i_read)))
    except Exception as err:
        print("## Err: {}".format(str(err)))
    finally:
        os.chdir(cwd)

if __name__ == "__main__":
    fire.Fire({"run": proc})
It now can be used as a regular wrapper script:
> ./wrapper.py run -cd .. 'docker exec -it 9ab85463e3c1 sh'
## PID: 1679972
## Changing directory: /home
/opt/work_dir # date
Sat May 16 15:18:46 UTC 2020
/opt/work_dir # cat /etc/os-release
NAME="Alpine Linux"
ID=alpine
VERSION_ID=3.11.6
PRETTY_NAME="Alpine Linux v3.11"
HOME_URL="https://alpinelinux.org/"
BUG_REPORT_URL="https://bugs.alpinelinux.org/"
/opt/work_dir #
## Exit Status Indicator: 0
I hope that someone will find this helpful.
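A side note on the design (my reading of the standard library, not something from the original post): pty.spawn already defaults both callbacks to os.read(fd, 1024), so m_read and i_read above mainly act as hooks for later customisation, and a stripped-down version without them behaves the same:

#! /usr/bin/env python3
import pty
from shlex import split

def run(cmd):
    # pty.spawn wires the child's controlling terminal to our stdio and
    # returns the exit status indicator from os.waitpid().
    return pty.spawn(split(cmd))

if __name__ == "__main__":
    print("## Exit Status Indicator:", run("docker exec -it 9ab85463e3c1 sh"))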
I have a bash script that gets called at the end of a Python script. If I call it manually from a simple Python script (test.py below) it works fine. However, when it is called from my actual script (long.py) it fails. So long.py runs, calls rename.sh at the end, and passes it a Linux directory path, source_dir. rename.sh renames a file in that path. Here's a relevant excerpt of that script:
long.py
PATH = '/var/bigbluebutton/published/presentation/'
LOGS = '/var/log/bigbluebutton/download/'
source_dir = PATH + meetingId + "/"
...

def main():
    ...
    try:
        create_slideshow(dictionary, length, slideshow, bbb_version)
        ffmpeg.trim_audio_start(dictionary, length, audio, audio_trimmed)
        ffmpeg.mux_slideshow_audio(slideshow, audio_trimmed, result)
        serve_webcams()
        # zipdir('./download/')
        copy_mp4(result, source_dir + meetingId + '.mp4')
    finally:
        print >> sys.stderr, source_dir
        # PROBLEM LINE
        subprocess.check_call(["/scripts/./rename.sh", str(source_dir)])
        print >> sys.stderr, "Cleaning up temp files..."
        cleanup()
        print >> sys.stderr, "Done"

if __name__ == "__main__":
    main()
Here's the problem:
long.py uses the above line to call rename.sh:
subprocess.check_call(["/scripts/./rename.sh", str(source_dir)])
it gives the error:
subprocess.CalledProcessError: Command '['/scripts/./rename.sh', '/var/bigbluebutton/published/presentation/5b64bdbe09fdefcc3004c987f22f163ca846f1ea-1574708322765/']' returned non-zero exit status 1
The script otherwise works perfectly.
test.py, a shortened version of long.py, contains only the following two lines:
test.py
source_dir = '/var/bigbluebutton/published/presentation/5b64bdbe09fdefcc3004c987f22f163ca846f1ea-1574708322765/'
subprocess.check_call(["/scripts/./rename.sh", str(source_dir)])
It does not encounter the error when run using python test.py.
Here's the contents of rename.sh:
rename.sh
#!/bin/bash
i=$1
a=$(grep '<name>' $i/metadata.xml | sed -e 's/<name>\(.*\)<\/name>/\1/' | tr -d ' ')
b=$(grep '<meetingName>' $i/metadata.xml | sed -e 's/<meetingName>\(.*\)<\/meetingName>/\1/' | tr -d ' ')
c=$(ls -alF $i/*.mp4 | awk '{ gsub(":", "_"); print $6"-"$7"-"$8 }')
d=$(echo $b)_$(echo $c).mp4
cp $i/*.mp4 /root/mp4s/$d
test.py and long.py are in the same location.
I'm not executing long.py manually; it gets executed by another program.
print >> sys.stderr, source_dir
confirms that the exact same value that I define explicitly in test.py is getting passed by long.py to rename.sh.
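Since exit status 1 on its own says nothing, one debugging step (my suggestion, not part of the original post) is to capture rename.sh's output and log the environment that long.py actually runs under, keeping the Python 2 style print used above; source_dir is the same variable as in the excerpt:

import os
import subprocess
import sys

# Run rename.sh as before, but capture stdout/stderr instead of letting
# check_call discard the reason for the non-zero exit status.
proc = subprocess.Popen(["/scripts/./rename.sh", str(source_dir)],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
print >> sys.stderr, "rename.sh exit status:", proc.returncode
print >> sys.stderr, "rename.sh stdout:", out
print >> sys.stderr, "rename.sh stderr:", err

# PATH and working directory often differ between a manual `python test.py`
# run and a script launched by another program, so log them too.
print >> sys.stderr, "PATH:", os.environ.get("PATH")
print >> sys.stderr, "cwd:", os.getcwd()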
Hi, I have a Python program in which a start method is defined. In the start method I am calling win32serviceutil.StartService(service) to start a service, like:
import os, platform, subprocess

try:
    import win32serviceutil
except:
    os.system("pip install pywin32")
    os.system("pip install pypiwin32")
    import win32serviceutil

OS = platform.system()  # will get you the platform/OS
print("You are using ", OS)

if __name__ == '__main__':
    service = 'WSearch'

    def startByCLI():
        cmd = 'net start ' + service
        os.system(cmd)

    def startByPython():
        # subprocess.check_output(["sc", "start", service], stderr=subprocess.STDOUT)
        win32serviceutil.StartService(service)

    if OS == 'Windows':
        try:
            output = startByPython()
        except subprocess.CalledProcessError as e:
            print(e.output)
            print(e.returncode)
            # os.system('python test2.py')
            subprocess.call("python ./install.py asadmin", shell=True)
            startByCLI()
So what I actually want is to run the start method from the command prompt like this:
python ./myfile.py startByPython
and have it trigger the startByPython method in myfile.py.
Many thanks in advance.
Hey all, thanks for your attention.
I wanted to run myfile.py with an argument from the command line, like:
$ /usr/bin/python myfile.py start
I got the solution, which is:
def main():
    # read arguments from the command line and
    # check whether at least two elements were entered
    if len(sys.argv) < 2:
        print "Usage: python aws.py {start|stop}\n"
        sys.exit(0)
    else:
        action = sys.argv[1]
        if action == "start":
            startInstance()
        elif action == "stop":
            stopInstance()
        else:
            print "Usage: python aws.py {start|stop}\n"
I am having some issues getting a script to run.
This works perfectly from the command line:
ssh root@ip.add.re.ss /usr/sbin/tcpdump -i eth0 -w - | /usr/sbin/tcpdump -r - -w /home/cuckoo/cuckoo/storage/analyses/1/saveit.pcap
However, when I use this script:
#!/usr/bin/env python
import sys
import os
import subprocess

cmd = []
remote_cmd = []
local_cmd = []
connect_cmd = []
outfile = None

try:
    connect_cmd = str.split(os.environ["RTCPDUMP_CMD"], " ")
except:
    connect_cmd = str.split("ssh root@fw", " ")

remote_cmd.extend(str.split("/usr/sbin/tcpdump -w -", " "))
local_cmd.extend(str.split("/usr/sbin/tcpdump -r -", " "))

for argument in xrange(1, len(sys.argv)):
    if sys.argv[argument] == "-w":
        outfile = sys.argv[argument + 1]
        sys.argv[argument] = None
        sys.argv[argument + 1] = None
    if sys.argv[argument] == "-i":
        remote_cmd.append(sys.argv[argument])
        remote_cmd.append(sys.argv[argument + 1])
        sys.argv[argument] = None
        sys.argv[argument + 1] = None
    if not sys.argv[argument] == None:
        if " " in sys.argv[argument]:
            local_cmd.append("'" + sys.argv[argument] + "'")
            remote_cmd.append("'" + sys.argv[argument] + "'")
        else:
            local_cmd.append(sys.argv[argument])
            remote_cmd.append(sys.argv[argument])

if not outfile == None:
    local_cmd.insert(1, "-w")
    local_cmd.insert(2, outfile)

cmd.extend(connect_cmd)
cmd.extend(remote_cmd)
cmd.append("|")
cmd.extend(local_cmd)

try:
    subprocess.call(cmd)
except KeyboardInterrupt:
    exit(0)
It spawns both tcpdump processes on the remote host, and the second tcpdump fails to save because the output path doesn't exist there. I added a print cmd at the end, and the ssh command being passed to the prompt is exactly the same (when running the script itself, Cuckoo passes a ton of options when it calls the script; it also puts the -w - before the -i eth0, but I tested that and it works from the command line as well).
So I am thoroughly stumped: why is the pipe to the local host not working in the script when it works from the prompt?
Oh, and credit for the script belongs to Michael Boman
http://blog.michaelboman.org/2013/02/making-cuckoo-sniff-remotely.html
So I am thoroughly stumped: why is the pipe to the local host not working in the script when it works from the prompt?
Because pipes are handled by the shell, and you're not running a shell.
If you look at the docs, under Replacing Older Functions with the subprocess Module, it explains how to do the same thing shell pipelines do. Here's the example:
output=`dmesg | grep hda`
# becomes
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits.
output = p2.communicate()[0]
So, in your terms:
cmd.extend(connect_cmd)
cmd.extend(remote_cmd)
try:
    remote = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    local = subprocess.Popen(local_cmd, stdin=remote.stdout)
    remote.stdout.close()  # Allow remote to receive a SIGPIPE if local exits.
    local.communicate()
except KeyboardInterrupt:
    exit(0)
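One optional addition (my suggestion, not part of the original answer): since subprocess.call's return value is no longer being checked, it can be worth waiting for the remote side as well and surfacing a failure from either tcpdump. This drops into the same script, so cmd, local_cmd and the imports come from above:

try:
    remote = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    local = subprocess.Popen(local_cmd, stdin=remote.stdout)
    remote.stdout.close()  # Allow remote to receive a SIGPIPE if local exits.
    local.communicate()
    remote.wait()
    if remote.returncode != 0 or local.returncode != 0:
        # report which side of the pipeline failed before exiting non-zero
        sys.stderr.write("tcpdump failed: remote=%d local=%d\n"
                         % (remote.returncode, local.returncode))
        sys.exit(1)
except KeyboardInterrupt:
    exit(0)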