Python program doesn't run in the background even after using & .. why?

I have a Python program which curls a file from a machine and tries to run it; if the file finishes within 300 seconds that's fine, otherwise the program kills it.
import threading, datetime, signal, os
from threading import Thread
import logging, time
from subprocess import Popen, PIPE
import subprocess, os, errno
from xmlrpclib import ServerProxy

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

id = 1
p = Popen('hostname', stdout=PIPE, stderr=PIPE)
out, err = p.communicate()
global hostname
hostname = out.replace('\n', '')
dir_path = '/export/home/host/' + str(hostname)
curl_path = 'http://1.1.1.1:8080/host_exec/' + str(hostname)

def run_file(new_id, data):
    data = data.split(',')
    if len(data) > 1:
        session = Popen(str(data[0]) + ' ' + str(data[1]), shell=True, stdout=PIPE, stderr=PIPE, stdin=PIPE)
    else:
        session = Popen(str(data[0]), shell=True, stdout=PIPE, stderr=PIPE, stdin=PIPE)
    start = datetime.datetime.now()
    print 'pid is ', session.pid
    stdout1 = ''
    stderr1 = ''
    killed_flag = 0  # initialized here so the check below can't raise a NameError
    while session.poll() is None:
        print 'sleeping'
        time.sleep(10)
        now = datetime.datetime.now()
        # Kill the process if it's still running and the timeout period is over (max 20 secs)
        if (now - start).seconds > 10:
            os.kill(session.pid, signal.SIGKILL)
            os.waitpid(-1, os.WNOHANG)
            killed_flag = 1
            break
        # This condition is checked to see if the process finished executing and doesn't need to be killed - success
        elif session.poll() is not None:
            print 'Process has finished'
            stdout1 = session.stdout.read()
            stderr1 = session.stderr.read()
            break
        # If timeout is still remaining - wait for it to finish or get killed
        else:
            print 'still executing'
    if killed_flag == 1:
        stdout1 = ' THE SCRIPT COULDN\'T COMPLETE EXECUTION ON HOSTNAME ' + str(hostname) + ' AS IT TOOK MORE THAN 5 MINUTES TO FINISH AND HENCE GOT KILLED : TRY RERUNNING IT OR RUN IT MANUALLY ON THE BOX'
    print stdout1
    ops = stdout1 + stderr1
    s = ServerProxy('http://paste/', allow_none=True)
    page_url = s.pastes.newPaste("text", ops, None)
    # print page_url
    # print stdout
    # Connect to db and update the table with the clob generated
    session = Popen(['sqlplus', '-S', 'abc/abc:1500'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
    if flag == 1:
        # print flag
        sql = 'update abc set temp_op = temp_op || \'' + str(page_url) + ',\', temp_db = temp_db || \'' + str(hostname) + ',\' where job_id=\'' + str(new_id) + '\' ;'
    if flag == 2:
        # print flag
        sql = 'update abc config set output=\'' + str(page_url) + '\', job_status=\'Completed\' where job_id=\'' + str(new_id) + '\' ;'
    session.stdin.write(sql)
    session.stdin.flush()
    stdout, stderr = session.communicate()
    # print stdout
    # print stderr

def do_this():
    print "Running with Current Id : ", id
    session = Popen('/export/home/curl ' + str(curl_path) + '/filenames.txt', shell=True, stdout=PIPE, stderr=PIPE, stdin=PIPE)
    stdout, stderr = session.communicate()
    files_in_dir = stdout.split(' ')
    if len(files_in_dir) > 1:
        print files_in_dir
        for file_name in files_in_dir:
            if file_name:
                file_list = file_name.split('_')
                new_id = file_list[1]
                if new_id > id:
                    session = Popen('/export/home/curl ' + str(curl_path) + '/' + str(file_name), shell=True, stdout=PIPE, stderr=PIPE, stdin=PIPE)
                    file_content, stderr = session.communicate()
                    t = Thread(target=run_file, args=(new_id, file_content,))
                    t.start()
                    global id
                    id = new_id
                else:
                    print 'No new file to process'
    else:
        print "EMPTY FOLDER"

while True:
    do_this()
    time.sleep(10)
But when I run it like
python abc.py &
it doesn't run in the background. Why?
Also, when I do a CTRL+C or COMMAND+C to kill it, it still keeps on running.

Related

Get returncode of a detached subprocess?

I'm trying to write a submitter for a job scheduler. As I do not know when the jobs come and how long they will run, I use multiprocessing to spawn one process for each job with subprocess and detach, so I can process the next job. This works pretty well, but I'd like to get the return code after each job has finished. Is that possible? I tried several subprocess variations, but those returning the RC blocked the process for the runtime of the job.
#!/usr/bin/python3
# coding=utf-8
import time
import multiprocessing
import subprocess

JobsList = []

def SubmitJob(jobname):
    """ Submit the next requested job """
    print(f"Starting job {jobname}...")
    JobDir = "/home/xxxxx/Jobs/"
    JobMem = "{}{}.sh".format(JobDir, jobname)
    SysoutFile = "./Sysout/{}.out".format(jobname)
    fh = open(SysoutFile, 'w')
    kwargs = {}
    kwargs.update(start_new_session=True)
    p = subprocess.Popen(JobMem, shell=False, stdout=fh, **kwargs)
    pid = p.pid
    print(f"Job {jobname} pid {pid} submitted...")

def PrepareSubmit():
    """ Create and start one process per job """
    jobs = []
    for Job in JobsList:
        process = multiprocessing.Process(target=SubmitJob, args=(Job,))
        jobs.append(process)
        JobsList.remove(Job)
    for j in jobs:
        j.start()
    for j in jobs:
        j.join()
    print("All jobs submitted...")

def main():
    """ Check queue for new job requests """
    number_of_lines = 0
    jobs_list = []
    while 1:
        job_queue = open("/home/xxxxx/Development/Python/#Projects/Scheduler/jobs.que", 'r')
        lines = job_queue.readlines()
        if len(lines) > number_of_lines:
            jobs_list.append(lines[len(lines)-1])
            NewJob = lines[len(lines)-1][:-1]
            JobsList.append(NewJob)
            PrepareSubmit()
            number_of_lines = number_of_lines + 1
        time.sleep(1)

if __name__ == "__main__":
    main()
The while loop in main() is for testing purposes only.
Can anyone tell me if that is possible, and how? Thanks in advance.
This is the code that gives me a return code but doesn't send a job until the previous job is finished. So if I have a long-running job, it delays the other jobs; that is what I called blocking.
def Submit(job):
    """ Submit the next requested job """
    print(f"Starting job {job}...")
    JobDir = "/home/uwe/Jobs/"
    JobMem = "{}{}.sh".format(JobDir, job)
    SysoutFile = "./Sysout/{}.out".format(job)
    fh = open(SysoutFile, 'w')
    kwargs = {}
    kwargs.update(start_new_session=True)
    p = subprocess.Popen(JobMem, shell=False, stdout=fh, **kwargs)
    pid = p.pid
    while p.poll() is None:
        a = p.poll()
        print(a)
        time.sleep(1)
    else:
        rc = p.returncode
        print(f"PID: {pid} rc: {rc}")

def main():
    JobsList = ['JOB90501', 'JOB00001', 'JOB00002', 'JOB00003']
    for Job in JobsList:
        Submit(Job)
Roy, this is my current code after your last hint:
def SubmitJob(jobname):
    """ Submit the next requested job """
    JobDir = "/home/uwe/Jobs/"
    JobMem = "{}{}.sh".format(JobDir, jobname)
    SysoutFile = "./Sysout/{}.out".format(jobname)
    fh = open(SysoutFile, 'w')
    kwargs = {}
    kwargs.update(start_new_session=True)
    p = subprocess.Popen(JobMem, shell=False, stdout=fh, **kwargs)
    ProcessList[p] = p.pid
    print(f"Started job {jobname} - PID: {p.pid}")

def main():
    c_JobsList = ['JOB00001', 'JOB00002', 'JOB00003']
    for Job in c_JobsList:
        SubmitJob(Job)
    for p, pid in ProcessList.items():
        RcFile = "./Sysout/{}.rc".format(pid)
        f = open(RcFile, 'w')
        while p.poll() is None:
            a = p.poll()
            time.sleep(1)
        else:
            rc = p.returncode
            f.writelines(str(rc))
            print(f"PID: {pid} rc: {rc}")
        f.close()
and the output:
Started job JOB00001 - PID: 5426
Started job JOB00002 - PID: 5427
Started job JOB00003 - PID: 5429
PID: 5426 rc: 0
PID: 5427 rc: 0
PID: 5429 rc: 8
Edit (the original answer is kept below for future reference)
The natural means to use for this purpose is Popen.poll, but apparently it doesn't work in some cases (see https://lists.gt.net/python/bugs/633489). The solution I'd like to propose is using Popen.wait with a very short timeout, as in the following code sample:
import subprocess
import time

p = subprocess.Popen(["/bin/sleep", "3"])
print(f"Created process {p.pid}")
count = 0
while True:
    try:
        ret = p.wait(.001)  # wait for 1 ms
        print(f"Got a return code {ret}")
        break
    except subprocess.TimeoutExpired as e:
        print("..", end="")
        time.sleep(.5)
        print(f"Still waiting, count is {count}")
        count += 1
print("Done!")
The output I'm getting is:
Created process 30040
..Still waiting, count is 0
..Still waiting, count is 1
..Still waiting, count is 2
..Still waiting, count is 3
..Still waiting, count is 4
..Still waiting, count is 5
Got a return code 0
Done
Original idea - Popen.poll
The method you should be using is Popen.poll (documentation). It returns the exit status of the process, or None if it's still running.
To use it, you'll have to keep the Popen objects you get when you call subprocess.Popen, and poll them later.
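For illustration, here is a minimal sketch of that polling loop (my sketch, not the original answer's code), assuming the jobs are shell commands; the sleep commands are placeholders:

import subprocess
import time

# Hypothetical commands; substitute your own job scripts.
commands = [["/bin/sleep", "2"], ["/bin/sleep", "4"]]

procs = [subprocess.Popen(cmd) for cmd in commands]  # start everything without blocking
while procs:
    for p in procs[:]:
        rc = p.poll()  # None while still running, the exit status once finished
        if rc is not None:
            print(f"PID {p.pid} finished with rc {rc}")
            procs.remove(p)
    time.sleep(1)      # check again in a second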

Python subprocess with real-time input and multiple consoles

The main issue
In a nutshell: I want two consoles for my program. One for active user input. And the other one for pure log output. (Working code including the accepted answer is in the question's text below, under the section "Edit-3"; functioning workarounds are under the sections "Edit-1" and "Edit-2".)
For this I have a main command line Python script, which is supposed to open an additional console for log output only. I intend to redirect the log output, which would otherwise be printed on the main script's console, to the stdin of the second console, which I start as a subprocess. (I use subprocess because I didn't find any other way to open a second console.)
The problem is that it seems I'm able to send to the stdin of this second console; however, nothing gets printed on it.
Following is the code I used for experimenting (with Python 3.4 on PyDev under Windows 10). The function writing(input, pipe, process) contains the part where the generated string is copied to the stdin (passed in as pipe) of the console opened via subprocess. The function writing(...) is run via the class writetest(Thread). (I left in some code which I commented out.)
import os
import sys
import io
import time
import threading
from cmd import Cmd
from queue import Queue
from subprocess import Popen, PIPE, CREATE_NEW_CONSOLE

REPETITIONS = 3

# Position of "The class" (Edit-2)
# Position of "The class" (Edit-1)

class generatetest(threading.Thread):
    def __init__(self, queue):
        self.output = queue
        threading.Thread.__init__(self)

    def run(self):
        print('run generatetest')
        generating(REPETITIONS, self.output)
        print('generatetest done')

    def getout(self):
        return self.output

class writetest(threading.Thread):
    def __init__(self, input=None, pipe=None, process=None):
        if input is None:  # just in case
            self.input = Queue()
        else:
            self.input = input
        if pipe is None:  # just in case
            self.pipe = PIPE
        else:
            self.pipe = pipe
        if process is None:  # just in case
            self.process = Popen('C:\\Windows\\System32\\cmd.exe', universal_newlines=True, creationflags=CREATE_NEW_CONSOLE)
        else:
            self.process = proc
        threading.Thread.__init__(self)

    def run(self):
        print('run writetest')
        writing(self.input, self.pipe, self.process)
        print('writetest done')

# Position of "The function" (Edit-2)
# Position of "The function" (Edit-1)

def generating(maxint, outline):
    print('def generating')
    for i in range(maxint):
        time.sleep(1)
        outline.put_nowait(i)

def writing(input, pipe, process):
    print('def writing')
    while True:
        try:
            print('try')
            string = str(input.get(True, REPETITIONS)) + "\n"
            pipe = io.StringIO(string)
            pipe.flush()
            time.sleep(1)
            # print(pipe.readline())
        except:
            print('except')
            break
        finally:
            print('finally')
            pass

data_queue = Queue()
data_pipe = sys.stdin
# printer = sys.stdout
# data_pipe = os.pipe()[1]

# The code of 'C:\\Users\\Public\\Documents\\test\\test-cmd.py'
# can be found in the question's text further below under "More code"
exe = 'C:\\Python34\\python.exe'
# exe = 'C:\\Windows\\System32\\cmd.exe'
arg = 'C:\\Users\\Public\\Documents\\test\\test-cmd.py'
arguments = [exe, arg]
# proc = Popen(arguments, universal_newlines=True, creationflags=CREATE_NEW_CONSOLE)
proc = Popen(arguments, stdin=data_pipe, stdout=PIPE, stderr=PIPE,
             universal_newlines=True, creationflags=CREATE_NEW_CONSOLE)

# Position of "The call" (Edit-2 & Edit-1) - file init (proxyfile)
# Position of "The call" (Edit-2) - thread = sockettest()
# Position of "The call" (Edit-1) - thread0 = logtest()
thread1 = generatetest(data_queue)
thread2 = writetest(data_queue, data_pipe, proc)
# time.sleep(5)
# Position of "The call" (Edit-2) - thread.start()
# Position of "The call" (Edit-1) - thread0.start()
thread1.start()
thread2.start()
# Position of "The call" (Edit-2) - thread.join()
# Position of "The call" (Edit-1) - thread.join()
thread1.join(REPETITIONS * REPETITIONS)
thread2.join(REPETITIONS * REPETITIONS)
# data_queue.join()
# receiver = proc.communicate(stdin, 5)
# print('OUT:' + receiver[0])
# print('ERR:' + receiver[1])
print("1st part finished")
A slightly different approach
The following additional code snippet works with regard to extracting the stdout from the subprocess. However, the previously sent stdin still isn't printed on the second console. Also, the second console is closed immediately.
proc2 = Popen(['C:\\Python34\\python.exe', '-i'],
              stdin=PIPE,
              stdout=PIPE,
              stderr=PIPE,
              creationflags=CREATE_NEW_CONSOLE)
proc2.stdin.write(b'2+2\n')
proc2.stdin.flush()
print(proc2.stdout.readline())
proc2.stdin.write(b'len("foobar")\n')
proc2.stdin.flush()
print(proc2.stdout.readline())
time.sleep(1)
proc2.stdin.close()
proc2.terminate()
proc2.wait(timeout=0.2)
print("Exiting Main Thread")
More info
As soon as I use one of the parameters stdin=data_pipe, stdout=PIPE, or stderr=PIPE for starting the subprocess, the resulting second console isn't active and doesn't accept keyboard input (which isn't desired, though it might be helpful information here).
The subprocess method communicate() can't be used for this, as it waits for the process to end.
More code
Finally, here is the code for the file used by the second console.
C:\Users\Public\Documents\test\test-cmd.py
from cmd import Cmd
from time import sleep
from datetime import datetime

INTRO = 'command line'
PROMPT = '> '

class CommandLine(Cmd):
    """Custom console"""
    def __init__(self, intro=INTRO, prompt=PROMPT):
        Cmd.__init__(self)
        self.intro = intro
        self.prompt = prompt
        self.doc_header = intro
        self.running = False

    def do_dummy(self, args):
        """Runs a dummy method."""
        print("Do the dummy.")
        self.running = True
        while self.running:
            print(datetime.now())
            sleep(5)

    def do_stop(self, args):
        """Stops the dummy method."""
        print("Stop the dummy, if you can.")
        self.running = False

    def do_exit(self, args):
        """Exits this console."""
        print("Do console exit.")
        exit()

if __name__ == '__main__':
    cl = CommandLine()
    cl.prompt = PROMPT
    cl.cmdloop(INTRO)
Thoughts
So far I'm not even certain whether the Windows command line interface offers the capability to accept input other than from the keyboard (instead of the desired stdin pipe or similar). Though, since it has some sort of passive mode, I expect it does.
Why is this not working?
Edit-1: Workaround via file (proof of concept)
Using a file as a workaround in order to display its new content, as suggested in the answer to Working multiple consoles in python, works in general. However, since the log file will grow to many GB, it isn't a practical solution in this case. It would at least require file splitting and the proper handling of it.
The class:
class logtest(threading.Thread):
    def __init__(self, file):
        self.file = file
        threading.Thread.__init__(self)

    def run(self):
        print('run logtest')
        logging(self.file)
        print('logtest done')
The function:
def logging(file):
    pexe = 'C:\\Python34\\python.exe '
    script = 'C:\\Users\\Public\\Documents\\test\\test-004.py'
    filek = '--file'
    filev = file
    file = open(file, 'a')
    file.close()
    time.sleep(1)
    print('LOG START (outer): ' + script + ' ' + filek + ' ' + filev)
    proc = Popen([pexe, script, filek, filev], universal_newlines=True, creationflags=CREATE_NEW_CONSOLE)
    print('LOG FINISH (outer): ' + script + ' ' + filek + ' ' + filev)
    time.sleep(2)
The call:
# The file tempdata is filled with several strings of "0\n1\n2\n"
# Looking like this:
# 0
# 1
# 2
# 0
# 1
# 2
proxyfile = 'C:\\Users\\Public\\Documents\\test\\tempdata'
f = open(proxyfile, 'a')
f.close()
time.sleep(1)
thread0 = logtest(proxyfile)
thread0.start()
thread0.join(REPETITIONS * REPETITIONS)
The tail script ("test-004.py"):
As Windows doesn't offer the tail command, I used the following script instead (based on the answer to How to implement a pythonic equivalent of tail -F?), which worked for this. The additional, yet kind of unnecessary class CommandLine(Cmd) was initially an attempt to keep the second console open (because the script file argument was missing). Though, it also proved useful for keeping the console fluently printing the new log file content. Otherwise the output wasn't deterministic/predictable.
import time
import sys
import os
import threading
from cmd import Cmd
from argparse import ArgumentParser

def main(args):
    parser = ArgumentParser(description="Parse arguments.")
    parser.add_argument("-f", "--file", type=str, default='', required=False)
    arguments = parser.parse_args(args)
    if not arguments.file:
        print('LOG PRE-START (inner): file argument not found. Creating new default entry.')
        arguments.file = 'C:\\Users\\Public\\Documents\\test\\tempdata'
    print('LOG START (inner): ' + os.path.abspath(os.path.dirname(__file__)) + ' ' + arguments.file)
    f = open(arguments.file, 'a')
    f.close()
    time.sleep(1)
    words = ['word']
    console = CommandLine(arguments.file, words)
    console.prompt = ''
    thread = threading.Thread(target=console.cmdloop, args=('', ))
    thread.start()
    print("\n")
    for hit_word, hit_sentence in console.watch():
        print("Found %r in line: %r" % (hit_word, hit_sentence))
    print('LOG FINISH (inner): ' + os.path.abspath(os.path.dirname(__file__)) + ' ' + arguments.file)

class CommandLine(Cmd):
    """Custom console"""
    def __init__(self, fn, words):
        Cmd.__init__(self)
        self.fn = fn
        self.words = words

    def watch(self):
        fp = open(self.fn, 'r')
        while True:
            time.sleep(0.05)
            new = fp.readline()
            print(new)
            # Once all lines are read this just returns ''
            # until the file changes and a new line appears
            if new:
                for word in self.words:
                    if word in new:
                        yield (word, new)
            else:
                time.sleep(0.5)

if __name__ == '__main__':
    print('LOG START (inner - as main).')
    main(sys.argv[1:])
Edit-1: More thoughts
Three workarounds which I haven't tried yet and which might work are: sockets (also suggested in this answer to Working multiple consoles in python), getting a process object via the process ID for more control, and using the ctypes library to directly access the Windows console API, which allows setting the screen buffer; a console can have multiple buffers, but only one active buffer (stated in the remarks of the documentation for the CreateConsoleScreenBuffer function).
However, using sockets might be the easiest one. And at least the size of the log doesn't matter this way. Though, connection problems might be an issue here.
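For the process-object idea, here is a tiny sketch (my illustration, not tried in the question) using the third-party psutil library, which can attach to an already running process by PID for inspection and basic control:

import psutil

def attach_by_pid(pid):
    try:
        proc = psutil.Process(pid)        # raises NoSuchProcess if the PID is gone
    except psutil.NoSuchProcess:
        return None
    print(proc.name(), proc.status())     # basic inspection of the target process
    return proc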
Edit-2: Workaround via sockets (proof of concept)
Using sockets as a workaround in order to display new log entries, as also suggested in the answer to Working multiple consoles in python, works in general, too. Though, this seems to be too much effort for something which should simply be sent to the process of the receiving console.
The class:
class sockettest(threading.Thread):
    def __init__(self, host, port, file):
        self.host = host
        self.port = port
        self.file = file
        threading.Thread.__init__(self)

    def run(self):
        print('run sockettest')
        socketing(self.host, self.port, self.file)
        print('sockettest done')
The function:
def socketing(host, port, file):
    pexe = 'C:\\Python34\\python.exe '
    script = 'C:\\Users\\Public\\Documents\\test\\test-005.py'
    hostk = '--address'
    hostv = str(host)
    portk = '--port'
    portv = str(port)
    filek = '--file'
    filev = file
    file = open(file, 'a')
    file.close()
    time.sleep(1)
    print('HOST START (outer): ' + pexe + script + ' ' + hostk + ' ' + hostv + ' ' + portk + ' ' + portv + ' ' + filek + ' ' + filev)
    proc = Popen([pexe, script, hostk, hostv, portk, portv, filek, filev], universal_newlines=True, creationflags=CREATE_NEW_CONSOLE)
    print('HOST FINISH (outer): ' + pexe + script + ' ' + hostk + ' ' + hostv + ' ' + portk + ' ' + portv + ' ' + filek + ' ' + filev)
    time.sleep(2)
The call:
# The file tempdata is filled with several strings of "0\n1\n2\n"
# Looking like this:
# 0
# 1
# 2
# 0
# 1
# 2
proxyfile = 'C:\\Users\\Public\\Documents\\test\\tempdata'
f = open(proxyfile, 'a')
f.close()
time.sleep(1)
thread = sockettest('127.0.0.1', 8888, proxyfile)
thread.start()
thread.join(REPETITIONS * REPETITIONS)
The socket script ("test-005.py"):
The following script is based on Python: Socket programming server-client application using threads. Here I just kept the class CommandLine(Cmd) as the log entry generator. At this point it shouldn't be a problem to put the client into the main script, which calls the second console, and then feed the queue with real log entries instead of (new) file lines. (The server is the printer.)
import socket
import sys
import threading
import time
import traceback
from cmd import Cmd
from argparse import ArgumentParser
from queue import Queue

BUFFER_SIZE = 5120

class CommandLine(Cmd):
    """Custom console"""
    def __init__(self, fn, words, queue):
        Cmd.__init__(self)
        self.fn = fn
        self.words = words
        self.queue = queue

    def watch(self):
        fp = open(self.fn, 'r')
        while True:
            time.sleep(0.05)
            new = fp.readline()
            # Once all lines are read this just returns ''
            # until the file changes and a new line appears
            self.queue.put_nowait(new)

def main(args):
    parser = ArgumentParser(description="Parse arguments.")
    parser.add_argument("-a", "--address", type=str, default='127.0.0.1', required=False)
    parser.add_argument("-p", "--port", type=str, default='8888', required=False)
    parser.add_argument("-f", "--file", type=str, default='', required=False)
    arguments = parser.parse_args(args)
    if not arguments.address:
        print('HOST PRE-START (inner): host argument not found. Creating new default entry.')
        arguments.host = '127.0.0.1'
    if not arguments.port:
        print('HOST PRE-START (inner): port argument not found. Creating new default entry.')
        arguments.port = '8888'
    if not arguments.file:
        print('HOST PRE-START (inner): file argument not found. Creating new default entry.')
        arguments.file = 'C:\\Users\\Public\\Documents\\test\\tempdata'
    file_queue = Queue()
    print('HOST START (inner): ' + ' ' + arguments.address + ':' + arguments.port + ' --file ' + arguments.file)
    # Start server
    thread = threading.Thread(target=start_server, args=(arguments.address, arguments.port, ))
    thread.start()
    time.sleep(1)
    # Start client
    thread = threading.Thread(target=start_client, args=(arguments.address, arguments.port, file_queue, ))
    thread.start()
    # Start file reader
    f = open(arguments.file, 'a')
    f.close()
    time.sleep(1)
    words = ['word']
    console = CommandLine(arguments.file, words, file_queue)
    console.prompt = ''
    thread = threading.Thread(target=console.cmdloop, args=('', ))
    thread.start()
    print("\n")
    for hit_word, hit_sentence in console.watch():
        print("Found %r in line: %r" % (hit_word, hit_sentence))
    print('HOST FINISH (inner): ' + ' ' + arguments.address + ':' + arguments.port)

def start_client(host, port, queue):
    host = host
    port = int(port)  # arbitrary non-privileged port
    queue = queue
    soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        soc.connect((host, port))
    except:
        print("Client connection error" + str(sys.exc_info()))
        sys.exit()
    print("Enter 'quit' to exit")
    message = ""
    while message != 'quit':
        time.sleep(0.05)
        if message != "":
            soc.sendall(message.encode("utf8"))
            if soc.recv(BUFFER_SIZE).decode("utf8") == "-":
                pass  # null operation
        string = ""
        if not queue.empty():
            string = str(queue.get_nowait()) + "\n"
        if string is None or string == "":
            message = ""
        else:
            message = string
    soc.send(b'--quit--')

def start_server(host, port):
    host = host
    port = int(port)  # arbitrary non-privileged port
    soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # SO_REUSEADDR flag tells the kernel to reuse a local socket in TIME_WAIT state,
    # without waiting for its natural timeout to expire
    soc.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    print("Socket created")
    try:
        soc.bind((host, port))
    except:
        print("Bind failed. Error : " + str(sys.exc_info()))
        sys.exit()
    soc.listen(5)  # queue up to 5 requests
    print("Socket now listening")
    # infinite loop - do not reset for every request
    while True:
        connection, address = soc.accept()
        ip, port = str(address[0]), str(address[1])
        print("Connected with " + ip + ":" + port)
        try:
            threading.Thread(target=client_thread, args=(connection, ip, port)).start()
        except:
            print("Thread did not start.")
            traceback.print_exc()
    soc.close()

def client_thread(connection, ip, port, max_buffer_size=BUFFER_SIZE):
    is_active = True
    while is_active:
        client_input = receive_input(connection, max_buffer_size)
        if "--QUIT--" in client_input:
            print("Client is requesting to quit")
            connection.close()
            print("Connection " + ip + ":" + port + " closed")
            is_active = False
        elif not client_input == "":
            print("{}".format(client_input))
            connection.sendall("-".encode("utf8"))
        else:
            connection.sendall("-".encode("utf8"))

def receive_input(connection, max_buffer_size):
    client_input = connection.recv(max_buffer_size)
    client_input_size = sys.getsizeof(client_input)
    if client_input_size > max_buffer_size:
        print("The input size is greater than expected {}".format(client_input_size))
    decoded_input = client_input.decode("utf8").rstrip()  # decode and strip end of line
    result = process_input(decoded_input)
    return result

def process_input(input_str):
    return str(input_str).upper()

if __name__ == '__main__':
    print('HOST START (inner - as main).')
    main(sys.argv[1:])
Edit-2: Further thoughts
Having direct control of the subprocess' console input pipe/buffer would be the preferable solution to this problem. That is what the bounty of 500 reputation is for.
Unfortunately I'm running out of time. Therefore I might use one of those workarounds for now and replace it with the proper solution later. Or maybe I have to use the nuclear option: just one console, where the ongoing log output is paused during any user keyboard input and printed afterwards. Of course this might lead to buffer problems when the user decides to stop typing halfway through.
Edit-3: Code including the accepted answer (one file)
With the answer from James Kent I get the desired behavior when I start a script with this code via the Windows command line (cmd) or PowerShell. However, when I start the same script via Eclipse/PyDev with "Python run", the output is always printed on the main Eclipse/PyDev console, while the second console of the subprocess remains empty and stays inactive. Though, I guess this is another system/environment speciality and a different issue.
from sys import argv, stdin, stdout
from threading import Thread
from cmd import Cmd
from time import sleep
from datetime import datetime
from subprocess import Popen, PIPE, CREATE_NEW_CONSOLE

INTRO = 'command line'
PROMPT = '> '

class CommandLine(Cmd):
    """Custom console"""
    def __init__(self, subprocess, intro=INTRO, prompt=PROMPT):
        Cmd.__init__(self)
        self.subprocess = subprocess
        self.intro = intro
        self.prompt = prompt
        self.doc_header = intro
        self.running = False

    def do_date(self, args):
        """Prints the current date and time."""
        print(datetime.now())
        sleep(1)

    def do_exit(self, args):
        """Exits this command line application."""
        print("Exit by user command.")
        if self.subprocess is not None:
            try:
                self.subprocess.terminate()
            except:
                self.subprocess.kill()
        exit()

class Console():
    def __init__(self):
        if '-r' not in argv:
            self.p = Popen(
                ['python.exe', __file__, '-r'],
                stdin=PIPE,
                creationflags=CREATE_NEW_CONSOLE
            )
        else:
            while True:
                data = stdin.read(1)
                if not data:
                    # break
                    sleep(1)
                    continue
                stdout.write(data)

    def write(self, data):
        self.p.stdin.write(data.encode('utf8'))
        self.p.stdin.flush()

    def getSubprocess(self):
        if self.p:
            return self.p
        else:
            return None

class Feeder(Thread):
    def __init__(self, console):
        self.console = console
        Thread.__init__(self)

    def run(self):
        feeding(self.console)

def feeding(console):
    for i in range(0, 100):
        console.write('test %i\n' % i)
        sleep(1)

if __name__ == '__main__':
    p = Console()
    if '-r' not in argv:
        thread = Feeder(p)
        thread.setDaemon(True)
        thread.start()
        cl = CommandLine(subprocess=p.getSubprocess())
        cl.use_rawinput = False
        cl.prompt = PROMPT
        cl.cmdloop('\nCommand line is waiting for user input (e.g. help).')
Edit-3: Honorable mentions
In the question's text above I mentioned using the ctypes library to directly access the Windows console API as another workaround (under "Edit-1: More thoughts"), and using just one console in a way that the input prompt always stays at the bottom as the nuclear option for this entire problem (under "Edit-2: Further thoughts").
For using the ctypes library I would have oriented myself on the following answer to Change console font in Windows. And for using just one console I would have tried the following answer to Keep console input line below output. I think both of these answers may offer potential merit regarding this problem, and maybe they are helpful to others who come across this post. Also, if I find the time, I will try whether they work.
The issue you're up against is the architecture of the console subsystem on Windows. The console window that you normally see is not hosted by cmd.exe but by conhost.exe, and a child process can only connect to a single conhost instance, meaning you're limited to a single window per process.
This leads to having an extra process for each console window you wish to have. To display anything in that window, you then need to look at how stdin and stdout are normally handled: they are written and read by the conhost instance. But if you turn stdin into a pipe (so you can write to the process), it no longer comes from conhost but from your parent process, and as such conhost has no visibility of it. This means that anything written to stdin is only read by the child process and is not displayed by conhost.
As far as I know, there isn't a way to share the pipe like that.
As a side effect, if you make stdin a pipe, then all keyboard input sent to the new console window goes nowhere, as stdin is not connected to that window.
For an output-only function this means you can spawn a new process that communicates with the parent via a pipe to stdin and echoes everything to stdout.
Here's an attempt:
#!python3
import sys, subprocess, time

class Console():
    def __init__(self):
        if '-r' not in sys.argv:
            self.p = subprocess.Popen(
                ['python.exe', __file__, '-r'],
                stdin=subprocess.PIPE,
                creationflags=subprocess.CREATE_NEW_CONSOLE
            )
        else:
            while True:
                data = sys.stdin.read(1)
                if not data:
                    break
                sys.stdout.write(data)

    def write(self, data):
        self.p.stdin.write(data.encode('utf8'))
        self.p.stdin.flush()

if __name__ == '__main__':
    p = Console()
    if '-r' not in sys.argv:
        for i in range(0, 100):
            p.write('test %i\n' % i)
            time.sleep(1)
So: a nice simple pipe between two processes, echoing the input back to the output if it's the subprocess. I used a -r flag to signify whether the instance is the child process, but there are other ways depending on how you implement it.
Several things to note:
the flush after writing to stdin is needed, as Python normally uses buffering.
this approach is written to live in its own module, hence the use of __file__.
due to the use of __file__, this approach may need modification if frozen using cx_Freeze or similar.
EDIT 1
For a version that can be frozen with cx_Freeze:
Console.py
import sys, subprocess

class Console():
    def __init__(self, ischild=True):
        if not ischild:
            if hasattr(sys, 'frozen'):
                args = ['Console.exe']
            else:
                args = [sys.executable, __file__]
            self.p = subprocess.Popen(
                args,
                stdin=subprocess.PIPE,
                creationflags=subprocess.CREATE_NEW_CONSOLE
            )
        else:
            while True:
                data = sys.stdin.read(1)
                if not data:
                    break
                sys.stdout.write(data)

    def write(self, data):
        self.p.stdin.write(data.encode('utf8'))
        self.p.stdin.flush()

if __name__ == '__main__':
    p = Console()
test.py
from Console import Console
import sys, time

if __name__ == '__main__':
    p = Console(False)
    for i in range(0, 100):
        p.write('test %i\n' % i)
        time.sleep(1)
setup.py
from cx_Freeze import setup, Executable

setup(
    name='Console-test',
    executables=[
        Executable(
            'Console.py',
            base=None,
        ),
        Executable(
            'test.py',
            base=None,
        )
    ]
)
EDIT 2
New version that should work under dev tools like IDLE
Console.py
#!python3
import ctypes, sys, subprocess

Kernel32 = ctypes.windll.Kernel32

class Console():
    def __init__(self, ischild=True):
        if ischild:
            # try to allocate a new console
            result = Kernel32.AllocConsole()
            if result > 0:
                # if we succeed, open a handle to the console output
                sys.stdout = open('CONOUT$', mode='w')
        else:
            # if frozen we assume it's named Console.exe
            # note that when frozen 'Win32GUI' must be used as a base
            if hasattr(sys, 'frozen'):
                args = ['Console.exe']
            else:
                # otherwise we use the console-free version of python
                args = ['pythonw.exe', __file__]
            self.p = subprocess.Popen(
                args,
                stdin=subprocess.PIPE
            )
            return
        while True:
            data = sys.stdin.read(1)
            if not data:
                break
            sys.stdout.write(data)

    def write(self, data):
        self.p.stdin.write(data.encode('utf8'))
        self.p.stdin.flush()

if __name__ == '__main__':
    p = Console()
test.py
from Console import Console
import sys, time

if __name__ == '__main__':
    p = Console(False)
    for i in range(0, 100):
        p.write('test %i\n' % i)
        time.sleep(1)
setup.py
from cx_Freeze import setup, Executable

setup(
    name='Console-test',
    executables=[
        Executable(
            'Console.py',
            base='Win32GUI',
        ),
        Executable(
            'test.py',
            base=None,
        )
    ]
)
This could be made more robust, i.e. always checking for an existing console and detaching from it if found before creating a new console, and possibly with better error handling.
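For illustration, a minimal sketch of that robustness check (my sketch, not part of the original answer), assuming the same ctypes setup as above; GetConsoleWindow, FreeConsole, and AllocConsole are real kernel32 functions, but the error handling here is only illustrative:

import ctypes
import sys

Kernel32 = ctypes.windll.Kernel32

def ensure_fresh_console():
    # if the process is already attached to a console, detach from it first
    if Kernel32.GetConsoleWindow():
        if not Kernel32.FreeConsole():
            raise ctypes.WinError()
    # then allocate a brand-new console for this process
    if not Kernel32.AllocConsole():
        raise ctypes.WinError()
    # reopen stdout against the new console, as in Console.py above
    sys.stdout = open('CONOUT$', mode='w')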
Since you are on Windows, you can use the win32console module to open a second console, or multiple consoles, for your thread or subprocess output. This is the simplest and easiest way that works if you are on Windows.
Here is sample code:
import win32console
import multiprocessing

def subprocess(queue):
    win32console.FreeConsole()   # frees subprocess from using the main console
    win32console.AllocConsole()  # creates a new console; all input and output of the subprocess goes to it
    while True:
        print(queue.get())
        # prints any output produced by the main script, passed to the subprocess using the queue

if __name__ == "__main__":
    queue = multiprocessing.Queue()
    multiprocessing.Process(target=subprocess, args=[queue]).start()
    while True:
        print("Hello World in main console")
        queue.put("Hello work in sub process console")
        # sends the above string to the subprocess, which prints it into its console
        # and whatever else you want to do in your main process
You can also do this with threading. You have to use the queue module if you want the queue functionality, as the threading module doesn't have its own queue.
Here is the win32console module documentation.
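For illustration, a minimal sketch (my illustration, not from the original answer) of that producer/consumer pattern with threading and queue.Queue. Note that a thread cannot get its own console window; AllocConsole is per-process, so this only moves the printing into a separate thread:

import queue
import threading
import time

q = queue.Queue()

def consumer():
    while True:
        print(q.get())      # blocks until the main thread puts a message

threading.Thread(target=consumer, daemon=True).start()
for i in range(3):
    q.put("message %d" % i)
time.sleep(1)               # give the consumer a moment before exiting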

Python - check subprocess activity every n seconds

I'm trying to make a Python script (currently on Windows) which opens some subprocesses (which run indefinitely) and periodically checks whether all of the opened subprocesses still work correctly. So it should be done with a while loop, I guess.
The subprocesses are FFMPEG livestreams.
The problem is when I do time.sleep(n) in my loop, because then every FFMPEG livestream stops, so I suppose time.sleep affects all of the child subprocesses.
I have no idea how to make it work.
Here is my python code:
import os, time, sys, datetime, smtplib, configparser, logging, subprocess, psutil

def forwardudpstream(channel_number, ip_input, ip_output):
    try:
        ffmpeg_command = 'ffmpeg -i udp://' + ip_input + ' -vcodec copy -acodec copy -f mpegts "udp://' + ip_output + '?pkt_size=1316"'
        ffmpeg_output = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=False)
        return str(ffmpeg_output.pid)
    except:
        print("Exception!")
        return '0'

while True:
    configuration = 'config.ini'
    channel_list_file = 'CHANNEL_LIST.conf'
    pid_folder = "D:\\Forward_UDP_Stream\\pids\\"
    channel_list = [line.rstrip('\n') for line in open(channel_list_file)]
    for line in channel_list:
        if not line.startswith('#') and ('|' in line):
            channel_number, ip_input, ip_output = line.split('|')
            print('----------')
            print("Channel number = ", channel_number)
            print("IP Input = ", ip_input)
            print("IP Output = ", ip_output)
            pid_file_found = False
            print("Checking if pid file exists...")
            for pidfile in os.listdir(pid_folder):
                if pidfile.startswith(channel_number + '-'):
                    print("Pid file is found for this channel.")
                    pid_file_found = True
                    pid = int(pidfile.split('-')[1].split('.')[0])
                    print("PID = ", str(pid))
                    print("Checking if corresponding process is active...")
                    if not psutil.pid_exists(pid):
                        print("Process is not active.")
                        print("Removing old pid file.")
                        os.remove(pid_folder + pidfile)
                        print("Starting a new process...")
                        pid_filename = channel_number + '-' + forwardudpstream(channel_number, ip_input, ip_output) + '.pid'
                        pid_file = open(pid_folder + pid_filename, "a")
                        pid_file.write("Process is running.")
                        pid_file.close()
                    else:
                        print("Process is active!")
                    break
            if pid_file_found == False:
                print("Pid file is not found. Starting a new process and creating pid file...")
                pid_filename = channel_number + '-' + forwardudpstream(channel_number, ip_input, ip_output) + '.pid'
                pid_file = open(pid_folder + pid_filename, "a")
                pid_file.write("Process is running.")
                pid_file.close()
    time.sleep(10)
Here is my CHANNEL_LIST.conf file example:
1|239.1.1.1:10000|239.1.1.2:10000
2|239.1.1.3:10000|239.1.1.4:10000
Perhaps there is some other solution to make waiting and subprocesses work together. Does anyone have an idea?
UPDATE:
I finally made it work when I removed the stdout=subprocess.PIPE part from the subprocess command.
Now it looks like this:
ffmpeg_output = subprocess.Popen(ffmpeg_command, stderr=subprocess.STDOUT, shell=False)
So now I'm confused: why was the previous command causing a problem?
Any explanation?
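A likely explanation, assuming standard pipe behavior: with stdout=subprocess.PIPE, nothing ever reads the pipe, so once the OS pipe buffer fills up with ffmpeg's output, ffmpeg blocks on its next write and the stream stalls. Removing the PIPE lets the output go to the console instead, so ffmpeg never blocks. If the output isn't needed, it can also be discarded explicitly; a minimal sketch, assuming ffmpeg_command as built in forwardudpstream() above:

import subprocess

# Discard ffmpeg's output entirely so the pipe buffer can never fill up.
ffmpeg_output = subprocess.Popen(
    ffmpeg_command,
    stdout=subprocess.DEVNULL,   # nothing accumulates, nothing blocks
    stderr=subprocess.STDOUT,
)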

How to execute commands on a Linux platform from different threads in Python?

I want to execute two separate commands on the command prompt and read each command's output in Python. My approach is to execute these commands at a certain time interval, i.e. every x seconds.
I have two commands, say command1 and command2. command1 takes a maximum of 30 seconds to print its output on the console; command2 takes a maximum of 10 seconds.
I want to execute command1 and command2 in a new thread each time (time interval), i.e. after every x seconds.
Program code:
import os, sys
import thread, threading
import time

def read_abc_data():
    with open("abc.txt", "a") as myfile:
        output = os.popen('command1').read()
        myfile.write(output + "\n\n")

def abc(threadName):
    while True:
        threading.Thread(target=read_abc_data).start()
        time.sleep(10)

def read_pqr_data():
    with open("pqr.txt", "a") as myfile:
        output = os.popen('command2').read()
        myfile.write(output + "\n\n")

def pqr(threadName):
    while True:
        threading.Thread(target=read_pqr_data).start()
        time.sleep(10)

if __name__ == "__main__":
    try:
        thread.start_new_thread(abc, ("Thread-1", ))
        thread.start_new_thread(pqr, ("Thread-2", ))
    except:
        print "Error: unable to start thread"
    while 1:
        pass
Currently I have given a 10-second sleep (delay) between executions of read_abc_data() and read_pqr_data(). After executing this program for 1 minute, I'm getting the abc.txt file as empty. I think the reason is that command1 didn't provide complete output within 10 seconds, right?
I want the abc.txt and pqr.txt files to contain the commands' output as data. Am I missing something?
Since the commands produce output while they run, you'd better read the output as a stream. This can be done with subprocess and readline:
import subprocess

def read_abc_data():
    with open("abc.txt", "a") as myfile:
        process = subprocess.Popen('command1', stdout=subprocess.PIPE)
        for line in iter(process.stdout.readline, ''):
            myfile.write(line)
Try this code. It uses locks to lock the file resource during updates, and uses a single function that gets executed as part of each thread's execution.
import time
import subprocess
import threading
from thread import start_new_thread

command1 = "ls"
command2 = "date"
file_name1 = "/tmp/one"
file_name2 = "/tmp/two"

def my_function(command, file_name, lock):
    process_obj = subprocess.Popen(command, stdout=subprocess.PIPE)
    command_output, command_error = process_obj.communicate()
    print command_output
    lock.acquire()
    with open(file_name, 'a+') as f:
        f.write(command_output)
        print 'Writing'
    lock.release()

if __name__ == '__main__':
    keep_running = True
    lock1 = threading.Lock()
    lock2 = threading.Lock()
    while keep_running:
        try:
            start_new_thread(my_function, (command1, file_name1, lock1))
            start_new_thread(my_function, (command2, file_name2, lock2))
            time.sleep(10)
        except KeyboardInterrupt, e:
            keep_running = False

How to get the last N lines of a subprocess' stderr stream output?

I am a Python newbie writing a Python (2.7) script that needs to exec a number of external applications, one of which writes a lot of output to its stderr stream. What I am trying to figure out is a concise way (in Python) to get the last N lines from that subprocess' stderr output stream.
Currently, I am running that external application from my Python script like so:
p = subprocess.Popen('/path/to/external-app.sh', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
    print "ERROR: External app did not complete successfully (error code is " + str(p.returncode) + ")"
    print "Error/failure details: ", stderr
    status = False
else:
    status = True
I'd like to capture the last N lines of output from its stderr stream so that they can be written to a log file or emailed, etc.
N = 3  # for 3 lines of output
p = subprocess.Popen(['/path/to/external-app.sh'],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
    print ("ERROR: External app did not complete successfully "
           "(error code is %s)" % p.returncode)
    print "Error/failure details: ", '\n'.join(stderr.splitlines()[-N:])
    status = False
else:
    status = True
If the whole output can't be stored in RAM then:
import sys
from collections import deque
from subprocess import Popen, PIPE
from threading import Thread

ON_POSIX = 'posix' in sys.builtin_module_names

def start_thread(func, *args):
    t = Thread(target=func, args=args)
    t.daemon = True
    t.start()
    return t

def consume(infile, output):
    for line in iter(infile.readline, ''):
        output(line)
    infile.close()

p = Popen(['cat', sys.argv[1]], stdout=PIPE, stderr=PIPE,
          bufsize=1, close_fds=ON_POSIX)

# preserve last N lines of stdout, print stderr immediately
N = 100
queue = deque(maxlen=N)
threads = [start_thread(consume, *args)
           for args in (p.stdout, queue.append), (p.stderr, sys.stdout.write)]
for t in threads:
    t.join()  # wait for IO completion
print ''.join(queue),  # print last N lines
retcode = p.wait()
