Python Popen chmod error when starting PostgreSQL

I can run:
sudo service postgresql start
from the command line with no issues. However when I try running the following:
import os
from subprocess import Popen,PIPE
pwd = getsudopwd()
cmd = ['sudo','service',process,'state']
p = Popen(cmd,stdout=PIPE,stdin=PIPE,stderr=PIPE,universal_newlines=True)
out,err = p.communicate(pwd+'\n')
if err: raise RuntimeError(err)
I get the following error
chmod: changing permissions of '/var/run/postgresql': Operation not permitted. So, why is there is an error accessing the pid directory for postgresql when this is run from Python?

You can simply use -S with sudo:
from subprocess import Popen, PIPE
import getpass
pwd = getpass.getpass()
proc = Popen(['sudo', '-S', 'service',process,'state'],
stdout=PIPE, stdin=PIPE, stderr=PIPE,universal_newlines=True)
out,err= proc.communicate(input="{}\n".format(pwd))

I suggest you use the sh library.
It's very simple and easy to use:
from sh import sudo
print(sudo('service postgresql start'))

Running sudo command with the -S option and piping your password to the stdin of the sudo command should solve your problem.
import os
from subprocess import Popen, PIPE
echo = Popen(('echo', 'mypasswd'), stdout = PIPE)
p = Popen(['sudo', '-S', 'service', 'postgresql', 'restart'], stdin = echo.stdout, stdout = PIPE, stderr = PIPE, universal_newlines = True)
out, err = p.communicate()
print(out)

Related

display issue when using subprocess to kick off command with bsub

I am using subprocess to kick off commands in shell mode. However, when launching these commands through bsub, there is a printing issue. Here is code that reproduces the issue:
command.py
for i in range(1000):
print(f'line{i}')
launch.py
from subprocess import Popen, PIPE, STDOUT
process = Popen('bsub -Is -q all.q -R "rusage [mem=50]" python3.7 command.py', shell=True, stdout=PIPE, stderr=STDOUT, encoding='utf-8')
try:
while True:
line = process.stdout.readline()
if((line=='') and (process.poll() is not None)):break
print(line)
except Exception as e:
print(repr(e))
output like:
line0
_____line1
_________line2
___________....
line958
line959
...
line999
"_" means "blank". Any solution and comments are appreciated.
Thanks All

List of commands with Popen

Hi guys,
I have three commands that copy folders to a similar path. I am using this code:
from subprocess import Popen, PIPE
cmd_list = [
'cp -r /opt/some_folder_1/ /home/user_name/',
'cp -r /var/some_folder_2/ /home/user_name/',
'cp -r /etc/some_folder_3/ /home/user_name/',
]
copy_paste = Popen(
cmd_list,
shell = True,
stdin = PIPE,
stdout = PIPE,
stderr = PIPE
)
stdout, stderr = copy_paste.communicate()
But to copy all three folders this way, I would have to run the code three times.
Could you help me with these guys?
Thank you!
Do:
import subprocess
cmd_list = [
['cp', '-r', '/opt/some_folder_1/', '/home/user_name/'],
['cp', '-r', '/var/some_folder_2/', '/home/user_name/'],
['cp', '-r', '/etc/some_folder_3/', '/home/user_name/'],
]
for cmd in cmd_list:
res = subprocess.check_output(cmd)
# stdout and stderr are available at res.stdout and res.stderr
# An error is raised for non-zero return codes
When passing a list into Popen or any of the other subprocess functions, you're not able to pass multiple commands in the cmd_list. It is expected that the first item in the list is the command you are running, and everything else are parameters for that one command. This restriction helps keep your code safer, especially when using user supplied input.
Another options is you can join everything together into a single command with a double ampersand. When doing so if one command fails the remaining commands won't run.
copy_paste = Popen(
' && '.join(cmd_list),
shell = True,
stdin = PIPE,
stdout = PIPE,
stderr = PIPE
)

subprocess python multiple commands

would like to open an ssh session, run commands and get the output real-time as the process runs (this base will involve running additional commands on the remote server)
from subprocess import Popen, PIPE
with Popen(['ssh <server-domain-name>',
],shell=True,
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True) as ssh:
output1 = ssh.stdin.write('ls -l')
output2 = ssh.stdin.write('mkdir test')
status = ssh.poll()
print(output1)
print(output2)
So far this is what I have. Using ssh.communicate(&lt;command&gt;) gives the right output but closes the subprocess after the first command — any thoughts?
worked for me
from fabric2 import Connection
with Connection('<host>') as c:
print(CGREEN + 'connected successfully!' + CEND)
#gather user info
user = io.StringIO
user = c.run("whoami", hide=True)
print(f'user found:{user.stdout} ')
#fetching files
c.run(<command>, pty=True)

How to get the process name by pid in Linux using Python?

I want to get the process name, given its PID, in Python.
Is there any direct method in python?
The psutil package makes this very easy.
import psutil
process = psutil.Process(pid)
process_name = process.name()
If you want to see the running process, you can just use os module to execute the ps unix command
import os
os.system("ps")
This will list the processes.
But if you want to get process name by ID, you can try ps -o cmd= <pid>
So the python code will be
import os
def get_pname(id):
    """Return the full command line of process `id`, or '' if no such process.

    Note: ``os.system()`` returns the command's *exit status*, not its
    output, so the original version printed ``0`` instead of the name.
    ``os.popen()`` captures the command's stdout instead.
    """
    return os.popen("ps -o cmd= {}".format(id)).read().strip()

print(get_pname(1))
The better method is using subprocess and pipes.
import subprocess
def get_pname(id):
    """Return the full command line of process `id` as a decoded, stripped str.

    The original did ``str(p.communicate()[0])``, which turns the bytes
    result into the literal string ``"b'...\\n'"``; ``decode()`` yields the
    actual text instead.
    """
    p = subprocess.Popen(["ps -o cmd= {}".format(id)],
                         stdout=subprocess.PIPE, shell=True)
    return p.communicate()[0].decode().strip()

name = get_pname(1)
print(name)
Command name (only the executable name):
from subprocess import PIPE, Popen
def get_cmd(pid):
    """Return the executable name (ps 'comm' column) of `pid` as raw bytes.

    Fixed: the original ``def get_cmd(pid)`` was missing the trailing
    colon and would not even parse (SyntaxError).
    """
    with Popen(f"ps -q {pid} -o comm=", shell=True, stdout=PIPE) as p:
        return p.communicate()[0]
Command with all its arguments as a string:
from subprocess import PIPE, Popen
def get_args(pid):
    """Return the full command line (ps 'cmd' column) of `pid` as raw bytes.

    Fixed: the original ``def get_args(pid)`` was missing the trailing
    colon and would not even parse (SyntaxError).
    """
    with Popen(f"ps -q {pid} -o cmd=", shell=True, stdout=PIPE) as p:
        return p.communicate()[0]

python subprocess communicate freezes

I have the following python code that hangs :
cmd = ["ssh", "-tt", "-vvv"] + self.common_args
cmd += [self.host]
cmd += ["cat > %s" % (out_path)]
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(in_string)
It is supposed to save a string (in_string) into a remote file over ssh.
The file is correctly saved but then the process hangs. If I use
cmd += ["echo"] instead of
cmd += ["cat > %s" % (out_path)]
the process does not hang so I am pretty sure that I misunderstand something about the way communicate considers that the process has exited.
do you know how I should write the command so the the "cat > file" does not make communicate hang ?
-tt option allocates tty that prevents the child process to exit when .communicate() closes p.stdin (EOF is ignored). This works:
import pipes
from subprocess import Popen, PIPE
cmd = ["ssh", self.host, "cat > " + pipes.quote(out_path)] # no '-tt'
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(in_string)
You could use paramiko -- pure Python ssh library, to write data to a remote file via ssh:
#!/usr/bin/env python
import os
import posixpath
import sys
from contextlib import closing
from paramiko import SSHConfig, SSHClient
hostname, out_path, in_string = sys.argv[1:] # get from command-line
# load parameters to setup ssh connection
config = SSHConfig()
with open(os.path.expanduser('~/.ssh/config')) as config_file:
config.parse(config_file)
d = config.lookup(hostname)
# connect
with closing(SSHClient()) as ssh:
ssh.load_system_host_keys()
ssh.connect(d['hostname'], username=d.get('user'))
with closing(ssh.open_sftp()) as sftp:
makedirs_exists_ok(sftp, posixpath.dirname(out_path))
with sftp.open(out_path, 'wb') as remote_file:
remote_file.write(in_string)
where makedirs_exists_ok() function mimics os.makedirs():
from functools import partial
from stat import S_ISDIR
def isdir(ftp, path):
    """Tri-state directory test over an (S)FTP-like connection.

    Returns True if `path` is a directory, False if it exists but is not
    one, and None if stat'ing it fails (e.g. the path does not exist).
    """
    try:
        mode = ftp.stat(path).st_mode
    except EnvironmentError:
        return None
    return S_ISDIR(mode)
def makedirs_exists_ok(ftp, path):
    """Recursively create `path` on the connection `ftp`, like os.makedirs().

    Already-existing directories are not an error (behaves like
    ``os.makedirs(..., exist_ok=True)``).  Relies on the sibling
    ``isdir(ftp, path)`` helper defined above.
    """
    def exists_ok(mkdir, name):
        """Don't raise an error if name is already a directory."""
        try:
            mkdir(name)
        except EnvironmentError:
            # mkdir failed: swallow the error only if the path now exists
            # as a directory; otherwise re-raise the original failure.
            if not isdir(ftp, name):
                raise

    # split off the last component — adapted from os.makedirs()
    head, tail = posixpath.split(path)
    if not tail:
        # path ended with a separator (e.g. 'a/b/'): split again to get
        # the real last component
        assert path.endswith(posixpath.sep)
        head, tail = posixpath.split(head)

    if head and tail and not isdir(ftp, head):
        exists_ok(partial(makedirs_exists_ok, ftp), head)  # recursive call
        assert isdir(ftp, head)

    # do create directory
    exists_ok(ftp.mkdir, path)
It makes sense that the cat command hangs. It is waiting for an EOF. I tried sending an EOF in the string but couldn't get it to work. Upon researching this question, I found a great module for streamlining the use of SSH for command line tasks like your cat example. It might not be exactly what you need for your usecase, but it does do what your question asks.
Install fabric with
pip install fabric
Inside a file called fabfile.py put
from fabric.api import run
def write_file(in_string, path):
    """Write `in_string` to `path` on the remote host via fabric's run().

    The arguments are interpolated into a shell command, so they must be
    quoted: the unquoted original broke on spaces/quotes in the data or
    path and allowed shell command injection.
    """
    import shlex  # local import keeps the snippet self-contained
    run('echo {} > {}'.format(shlex.quote(in_string), shlex.quote(path)))
And then run this from the command prompt with,
fab -H username@host write_file:in_string=test,path=/path/to/file

Categories