Python code in simulate mode

I have Python code that reads output from a command-line program:
import subprocess

def get_prg_output():
    p = subprocess.Popen(['ls', '-l'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out

print get_prg_output()
In my office I want to simulate the result like this:
def get_prg_output():
    return 'ok - program executed'

print get_prg_output()
Is there an elegant way to do this without commenting out the original function?
I've tried this:
import subprocess

debug = True

if not debug:
    def get_prg_output():
        p = subprocess.Popen(['ls', '-l'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        return out
else:
    def get_prg_output():
        return 'ok - program executed'

print get_prg_output()
but I don't like it.
Thanks

I'd do something like this:
def _get_prg_output_real():
    p = subprocess.Popen(['ls', '-l'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out

def _get_prg_output_fake():
    return 'ok - program executed'
Here you can toggle between:
get_prg_output = _get_prg_output_fake
or
get_prg_output = _get_prg_output_real
based on user input, command-line arguments, config files, or by commenting/uncommenting a single line of code, as sketched below.
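For example, a minimal sketch of the command-line-argument variant (the --simulate flag name and script layout are just illustrative):

import argparse
import subprocess

def _get_prg_output_real():
    p = subprocess.Popen(['ls', '-l'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out

def _get_prg_output_fake():
    return 'ok - program executed'

parser = argparse.ArgumentParser()
parser.add_argument('--simulate', action='store_true',
                    help='use the fake implementation instead of running the command')
args = parser.parse_args()

# Bind the public name to whichever implementation was selected
get_prg_output = _get_prg_output_fake if args.simulate else _get_prg_output_real

print get_prg_output()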

def get_prg_output_fake():
    return 'ok - program executed'
then, if you want to enable this simulation, just do
get_prg_output = get_prg_output_fake

I'd use an environment variable (or a command switch) to control it myself so you don't have to make code changes to test.
import os
import subprocess

def _get_prg_output_real():
    p = subprocess.Popen(['ls', '-l'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out

def _get_prg_output_fake():
    return 'ok - program executed'

# Default to the real implementation unless DEBUG is set to "true"
if os.environ.get('DEBUG', '').lower() == 'true':
    get_prg_output = _get_prg_output_fake
else:
    get_prg_output = _get_prg_output_real
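With this in place you can flip modes at launch time with no code edits, e.g. by running something like DEBUG=true python yourscript.py in a Unix-like shell (yourscript.py being whatever file holds this code).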

Related

Capture real-time `stdout` and `stderr` when running a function in a separate process in Python

I have a Python function and want to run it as a separate process with the multiprocessing package.
def run(ctx: Context):
    print("hello world!")
    return ctx
Afterwards I run it as a separate process with the following script:
import multiprocessing
p = multiprocessing.Process(target=run, args=(ctx, ))
p.start()
p.join()
Now I need to capture the live stdout and stderr of the above process. Is there a way to do it, like:
import subprocess
proc = subprocess.Popen(['python', 'fake_utility.py'], stdout=subprocess.PIPE)
while True:
    line = proc.stdout.readline()
    if not line:
        break
But I need to pass a function rather than run a command with Popen. Do you know how I can read stdout when I run my function in a separate process?
My approach would be to create a custom context manager that temporarily replaces sys.stdout and sys.stderr with io.StringIO() instances to capture the output. For this you need to make the target of your Process a new function that sets up the context manager and returns the results through a multiprocessing.Queue (which, by the way, you would need anyway if you expect run to return its result to the main process):
from multiprocessing import Process, Queue
from io import StringIO
import sys

class CaptureOutput:
    def __enter__(self):
        self._stdout_output = ''
        self._stderr_output = ''
        self._stdout = sys.stdout
        sys.stdout = StringIO()
        self._stderr = sys.stderr
        sys.stderr = StringIO()
        return self

    def __exit__(self, *args):
        self._stdout_output = sys.stdout.getvalue()
        sys.stdout = self._stdout
        self._stderr_output = sys.stderr.getvalue()
        sys.stderr = self._stderr

    def get_stdout(self):
        return self._stdout_output

    def get_stderr(self):
        return self._stderr_output

def run(ctx):
    print("hello world!")
    print("It works!", file=sys.stderr)
    raise Exception('Oh oh!')  # Comment out to have a successful completion
    return ctx

def worker(ctx, queue):
    import traceback
    with CaptureOutput() as capturer:
        try:
            result = run(ctx)
        except Exception as e:
            result = e
            print(traceback.format_exc(), file=sys.stderr)
    queue.put((result, capturer.get_stdout(), capturer.get_stderr()))

if __name__ == '__main__':
    queue = Queue()
    ctx = None  # for demo purposes
    p = Process(target=worker, args=(ctx, queue))
    p.start()
    # Must do this call before call to join:
    result, stdout_output, stderr_output = queue.get()
    p.join()
    print('stdout:', stdout_output)
    print('stderr:', stderr_output)
Prints:
stdout: hello world!
stderr: It works!
Traceback (most recent call last):
File "C:\Booboo\test\test.py", line 44, in worker
result = run(ctx)
File "C:\Booboo\test\test.py", line 36, in run
raise Exception('Oh oh!') # Comment out to have a successful completion
Exception: Oh oh!

How to get the returncode using subprocess

So I am trying to execute a file and get its return value back using the built-in methods available in Python's subprocess library.
For example, let's say I want to execute this hello_world Python file:
def main():
    print("in main")
    return("hello world!")

if __name__ == '__main__':
    main()
I do not care about capturing what is printed in main. What I want to get back is the return value hello world!.
I tried numerous things, but none of them worked.
Here's a list of what I tried and their outputs:
args is common for all trials:
args = ['python', hello_cmd]
First trial:
p1 = subprocess.Popen(args, stdout=subprocess.PIPE)
print(p1.communicate())
print("returncode is:")
print(p1.returncode)
output is:
(b'in main\n', None)
returncode is:
0
Second trial:
p2 = subprocess.check_output(args, stderr=subprocess.STDOUT)
print(p2)
output is:
b'in main\n'
Third trial:
output, result = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False).communicate()
print(output)
print(result)
output is:
b'in main\n'
b''
Fourth trial:
p4 = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(p4)
output is:
CompletedProcess(args=['python', '/path/to/file/hello.py'], returncode=0, stdout=b'in main\n', stderr=b'')
Fifth trial:
p5 = subprocess.getstatusoutput(args)
print(p5)
output is:
(0, '')
Can anyone help?
The return value of the main function is not the return code that is passed to the OS. To pass a return code to the OS, use sys.exit(), which expects an integer. You can pass it a string, but if you do, Python prints the string to stderr and passes 1 to the OS.
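For example, a minimal sketch of the difference (the value 3 is just illustrative):

import sys

def main():
    print("in main")
    return 3          # a plain return value; the OS never sees this

if __name__ == '__main__':
    sys.exit(main())  # sys.exit(3) makes 3 the process's exit status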
You cannot use strings as return codes; it must be an integer. If you want to act differently depending on the outcome of the subprocess, map its return code to some action in your main program. For example:
def execute_sub_program(): ...

# somewhere else:
return_code = execute_sub_program()
if return_code == 0:
    # do something ...
elif ...
You can try subprocess.run().returncode; it gives 0 for a successful execution and a non-zero code (here 1, from the uncaught exception below) for a failed one.
driver.py
import subprocess
args = ['python', './hello_cmd.py']
status_code = subprocess.run(args).returncode
print(["Successful execution", "Failed execution"][status_code])
For happy flow (hello_cmd.py):
def main():
    print("in main")
    return("hello world!")

if __name__ == '__main__':
    main()
For failed flow (hello_cmd.py):
def main():
    print("in main")
    raise ValueError('Failed')

if __name__ == '__main__':
    main()

python subprocess custom return code [duplicate]

This question already has answers here: Exit codes in Python (14 answers). Closed 4 years ago.
I'd like to return a value from a Python subprocess call corresponding to the kind of error encountered. E.g.:
test.py
RETCODE_OK = 0
RETCODE_THING_1_FAILED = 1
RETCODE_THING_2_FAILED = 2
def main():
return RETCODE_THING_2_FAILED
if __name__ == '__main__':
main()
Then I'm calling it with subprocess, like so:
>>> import subprocess
>>> proc = subprocess.Popen('python test.py', shell=True)
>>> proc.communicate()
(None, None)
>>> proc.returncode
0
I'd like this to return whatever was returned in main(), in this case 2. I've also got other stuff in the stdout and stderr streams, so I can't just print RETCODE_THING_2_FAILED and get the stdout.
Processes use exit codes, not return statements.
You should use sys.exit(STATUS) rather than a return STATUS statement:
test2.py:
---------
import sys

RETCODE_THING_2_FAILED = 2

def main():
    sys.exit(RETCODE_THING_2_FAILED)

if __name__ == '__main__':
    main()
interpreter:
------------
>>> p = subprocess.Popen('python test2.py')
>>> p.communicate()
(None, None)
>>> p.returncode
2
This is because the process is actually closing/exiting, not returning a value to another function.
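One more thing to keep in mind: on POSIX systems only the low 8 bits of the exit status are preserved, so custom codes should stay within 0-255 (sys.exit(256) would wrap around to 0).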

kill python thread while it waits for read for a long time

I need to terminate a thread, but I can't regularly check any flags since it is blocked waiting on a read/input.
Simple example:
import threading, time

class Test(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        print(input("waiting for input: "))

th = Test()
th.start()
time.sleep(5)
print("killing!")
th.join(5)
print(th.is_alive())
A more realistic example is this (kill the thread when it hangs, i.e. produces no output for a longer time):
import threading, time, subprocess, sys

class Test(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)

    def call(self, args):
        return subprocess.Popen(" ".join(args), shell=True, stderr=subprocess.PIPE)

    def run(self):
        mainProcess = self.call([ any program that could hang ])
        out = None
        while mainProcess.returncode != 0 or out == '' and mainProcess.poll() != None:
            out = mainProcess.stderr.read(1)
            if out != '':
                sys.stdout.write(out)
                sys.stdout.flush()

th = Test()
th.start()
time.sleep(5)
print("killing!")
th.join(5)
print(th.is_alive())
If there is a better approach, I would be happy too.
Here's an example of how you can solve your hanging-process problem with select:
import threading
import select
import subprocess
import sys

def watch_output(args, timeout):
    process = subprocess.Popen(args, stdout=subprocess.PIPE)
    while True:
        ready_to_read, _, _ = select.select([process.stdout], [], [], timeout)
        if not ready_to_read:
            print "hanging process"
            process.kill()
            break
        out = ready_to_read[0].read(1)
        if not out:
            print "normal exit"
            break
        sys.stdout.write(out)
        sys.stdout.flush()
    return process.wait()

watch_output(['ls'], timeout=10)
Even reading input with a timeout is possible:
def read_input(prompt, timeout):
    sys.stdout.write(prompt)
    sys.stdout.flush()
    ready_to_read, _, _ = select.select([sys.stdin], [], [], timeout)
    if not ready_to_read:
        return None
    return ready_to_read[0].readline()

print read_input("waiting for input (4s): ", 4)
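One caveat: on Windows, select.select() accepts only sockets, not pipes or sys.stdin, so both snippets above are limited to POSIX systems.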
You can just have the main thread kill the process. The reader thread will eventually hit EOF and then exit.
Example:
#!/usr/bin/env python
import threading
import subprocess
import time
import sys

def pipe_thread(handle):
    print "in pipe_thread"
    x = handle.readline()
    while x:
        print "got:", x[:-1]
        x = handle.readline()

def main():
    p = subprocess.Popen(["./sender"], stdout = subprocess.PIPE)
    t = threading.Thread(target = pipe_thread, args = [p.stdout])
    t.start()
    print "sleeping for a while"
    time.sleep(5)
    print "killing process"
    p.kill()
    print "joining"
    t.join()
    print "joined"

main()

stdout readline() blocking

I'm trying to run a Minecraft server through Python 2.7, and it's working fine.
But when I try to pass it the stop command, it doesn't do anything until the server next outputs something.
This is my code:
import os, sys, subprocess, threading, time

class Server:
    def start(self):
        t = threading.Thread(target=self.run)
        t.daemon = True
        t.start()

    def run(self):
        self.p = subprocess.Popen('java -Xmx512M -Xms512M -jar minecraft_server.1.8.1.jar nogui',
                                  cwd=os.path.join(os.getcwd(), 'server'),
                                  stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE,
                                  shell=True)
        while True:
            nextline = self.p.stdout.readline()
            print self.p.poll()
            if nextline == '' and self.p.poll() != None:
                break
            if not nextline == '':
                sys.stdout.write(nextline)
                sys.stdout.flush()

    def stop(self):
        self.p.communicate(input='stop')[0]
#endclass

s = Server()
s.start()

count = 0
# keep running
while True:
    count += 1
    if count == 15:
        s.stop()
        print "STOPPING SERVER"
    time.sleep(1)
I'd like to have it not pause there.
Stopping it after 15 seconds is just a test to see if I can get it working correctly, but I have no clue how to fix this. I saw some solutions using fcntl, but I want this to work on all platforms, so that isn't an option for me.
How can I make it so I can run any command at any time?
Update:
import os, sys, subprocess, threading, time

class Server:
    def start(self):
        t = threading.Thread(target=self.run)
        t.daemon = True
        t.start()

    def run(self):
        self.p = subprocess.Popen('java -Xmx512M -Xms512M -jar minecraft_server.1.8.1.jar nogui',
                                  cwd=os.path.join(os.getcwd(), 'server'),
                                  stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE,
                                  shell=True)
        print "started server"
        while True:
            outs, errs = self.p.communicate(input=None)
            print outs.decode()
            print "TEST"

    def stop(self):
        self.p.stdin.write('stop')
#endclass

s = Server()
s.start()

count = 0
# keep running
while True:
    count += 1
    print count
    if count == 15:
        s.stop()
        print "STOPPING SERVER"
    time.sleep(1)
There are multiple problems here:
You are mixing calls to .communicate() and .stdout.readline(), which are both trying to read from stdout.
You are attempting to read from the subprocess's stdout on two separate threads simultaneously.
While neither of these things is strictly illegal, both are highly inadvisable and lead to problems like this one.
I would recommend having one thread monitor stdout/stderr and another manually talking to stdin (with .stdin.write() or similar); a rough sketch of that split follows.
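As a rough sketch of that division of labor, under the question's setup (this sketch uses a dedicated reader thread with readline for the live output, since communicate() only returns once the process has exited; also note the trailing newline on the stop command, which the original stop() omitted, so the server likely never saw a complete command line):

import subprocess, threading, sys

def reader(pipe):
    # This thread is the only place that reads from the child's stdout
    for line in iter(pipe.readline, ''):
        sys.stdout.write(line)
        sys.stdout.flush()

p = subprocess.Popen('java -Xmx512M -Xms512M -jar minecraft_server.1.8.1.jar nogui',
                     stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT,  # fold stderr into the same stream
                     shell=True)
t = threading.Thread(target=reader, args=(p.stdout,))
t.daemon = True
t.start()

# ... later, the main thread is the only place that writes to stdin:
p.stdin.write('stop\n')
p.stdin.flush()
p.wait()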
