redirect subprocess log to wxpython txt ctrl - python

I would like to capture log output from a Python-based subprocess. Here is part of my code. How do I redirect my log to this text ctrl as well?
Here is mytest.py:
import logging

log = logging.getLogger('test')


class MyTestClass():
    """Example worker whose log/print output should be captured by the GUI."""

    def TestFunction(self):
        """Run the long task; returns a pair of placeholder results."""
        # Fixed: the original line was missing its closing parenthesis.
        log.info("start function")
        # runs for 5 - 10 mins and has lots of log statements
        print("some stuff")
        log.info("after Test Function")
        # for now: placeholder results (the original returned undefined a, b,
        # which raised NameError)
        a, b = 0, 0
        return a, b
        # sys.exit(2)


if __name__ == "__main__":
    myApp = MyTestClass()
    myApp.TestFunction()
I am doing something of this sort in my maingui:
class WxLog(logging.Handler):
    """Logging handler that appends each formatted record to a wx text control."""

    def __init__(self, ctrl):
        logging.Handler.__init__(self)
        # Target widget; may be None, in which case records are dropped.
        self.ctrl = ctrl

    def emit(self, record):
        """Format *record* and append it (plus a newline) to the widget."""
        if not self.ctrl:
            return
        text = self.format(record)
        self.ctrl.AppendText(text + "\n")
and in my gui
self.log = wx.TextCtrl(self, -1, "", style=wx.TE_MULTILINE| wx.TE_RICH2)
#logging.basicConfig(level=logging.INFO)
self.logr = logging.getLogger('')
self.logr.setLevel(logging.INFO)
hdlr = WxLog(self.log)
hdlr.setFormatter(logging.Formatter('%(message)s '))
self.logr.addHandler(hdlr)
#snip
prog = os.path.join(mydir,"mytest.py")
params = [sys.executable,prog]
# Start the subprocess
outmode = subprocess.PIPE
errmode = subprocess.STDOUT
self._proc = subprocess.Popen(params,
stdout=outmode,
stderr=errmode,
shell=True
)
# Read from stdout while there is output from process
while self._proc.poll() == None:
txt = self._proc.stdout.readline()
print txt
# also direct log to txt ctrl
txt = 'Return code was ' + str(self._proc.returncode) +'\n'
# direct
self.logr.info("On end ")

You can try following the suggestion in this post.
Update: You can set the logger in the subprocess to use a SocketHandler and set up a socket server in the GUI to listen for messages from the subprocess, using the technique in the linked-to post to actually make things appear in the GUI. A working socket server is included in the logging documentation.

I wrote an article about how I redirect a few things like ping and traceroute using subprocess to my TextCtrl widget here: http://www.blog.pythonlibrary.org/2010/06/05/python-running-ping-traceroute-and-more/
That might help you figure it out. Here's a more generic article that doesn't use subprocess: http://www.blog.pythonlibrary.org/2009/01/01/wxpython-redirecting-stdout-stderr/
I haven't tried redirecting with the logging module yet, but that may be something I'll do in the future.

Related

Display two windows at the same time for python [duplicate]

Apart from the script's own console (which does nothing), I want to open two consoles and print the variables con1 and con2 in different consoles. How can I achieve this?
con1 = 'This is Console1'
con2 = 'This is Console2'
I've no idea how to achieve this and spent several hours trying to do so with modules such as subprocess but with no luck. I'm on windows by the way.
Edit:
Would the threading module do the job? or is multiprocessing needed?
Eg:
If you don't want to reconsider your problem and use a GUI such as in #Kevin's answer then you could use subprocess module to start two new consoles concurrently and display two given strings in the opened windows:
#!/usr/bin/env python3
import sys
import time
from subprocess import Popen, PIPE, CREATE_NEW_CONSOLE

messages = 'This is Console1', 'This is Console2'

# Open one new console per message.  Each child runs a tiny "poor man's
# `cat`" that copies its stdin to stdout, so text written to proc.stdin
# from here shows up in that console window.
# Fixed: the paste had lost all indentation, both in this script and in
# the embedded child program, making both of them SyntaxErrors.
child_code = """import sys
for line in sys.stdin: # poor man's `cat`
    sys.stdout.write(line)
    sys.stdout.flush()
"""
processes = [Popen([sys.executable, "-c", child_code],
                   stdin=PIPE, bufsize=1, universal_newlines=True,
                   # assume the parent script is started from a console itself e.g.,
                   # this code is _not_ run as a *.pyw file
                   creationflags=CREATE_NEW_CONSOLE)
             for _ in range(len(messages))]

# display messages
for proc, msg in zip(processes, messages):
    proc.stdin.write(msg + "\n")
    proc.stdin.flush()

time.sleep(10)  # keep the windows open for a while

# close windows
for proc in processes:
    proc.communicate("bye\n")
Here's a simplified version that doesn't rely on CREATE_NEW_CONSOLE:
#!/usr/bin/env python
"""Show messages in two new console windows simultaneously."""
import sys
import platform
from subprocess import Popen

messages = 'This is Console1', 'This is Console2'

# Pick a command that opens a new terminal window on this platform.
if platform.system() == "Windows":
    new_window_command = "cmd.exe /c start".split()
else:  # XXX this can be made more portable
    new_window_command = "x-terminal-emulator -e".split()

# Each new console runs a tiny Python one-liner that echoes its argument
# and then waits for Enter so the window stays open.
echo = [sys.executable, "-c",
        "import sys; print(sys.argv[1]); input('Press Enter..')"]
processes = [Popen(new_window_command + echo + [msg]) for msg in messages]

# Wait for the windows to be closed.
for proc in processes:
    proc.wait()
You can get something like two consoles using two Tkinter Text widgets.
from Tkinter import *
import threading


class FakeConsole(Frame):
    """A Text widget styled like a console, fed safely from other threads."""

    def __init__(self, root, *args, **kargs):
        Frame.__init__(self, root, *args, **kargs)
        # white text on black background,
        # for extra verisimilitude
        self.text = Text(self, bg="black", fg="white")
        self.text.pack()
        # list of things not yet printed
        self.printQueue = []
        # one thread will be adding to the print queue,
        # and another will be iterating through it.
        # better make sure one doesn't interfere with the other.
        self.printQueueLock = threading.Lock()
        # check for new messages every five milliseconds
        self.after(5, self.on_idle)

    def on_idle(self):
        """Drain the queue into the Text widget on the GUI thread."""
        with self.printQueueLock:
            for pending in self.printQueue:
                self.text.insert(END, pending)
                self.text.see(END)
            self.printQueue = []
        self.after(5, self.on_idle)

    def show(self, msg, sep="\n"):
        """Queue *msg* for printing to the console (thread-safe)."""
        with self.printQueueLock:
            self.printQueue.append(str(msg) + sep)


# warning! Calling this more than once per program is a bad idea.
# Tkinter throws a fit when two roots each have a mainloop in different threads.
def makeConsoles(amount):
    root = Tk()
    consoles = [FakeConsole(root) for n in range(amount)]
    for c in consoles:
        c.pack()
    threading.Thread(target=root.mainloop).start()
    return consoles


a, b = makeConsoles(2)
a.show("This is Console 1")
b.show("This is Console 2")
a.show("I've got a lovely bunch of cocounts")
a.show("Here they are standing in a row")
b.show("Lorem ipsum dolor sit amet")
b.show("consectetur adipisicing elit")
Result:
I don't know if it suits you, but you can open two Python interpreters using Windows start command:
from subprocess import Popen
p1 = Popen('start c:\python27\python.exe', shell=True)
p2 = Popen('start c:\python27\python.exe', shell=True)
Of course there is a problem: now Python runs in interactive mode, which is not what you want (you can also pass a file as a parameter, and that file will be executed).
On Linux I would try to make named pipe, pass the name of the file to python.exe and write python commands to that file. 'Maybe' it will work ;)
But I don't know how to create a named pipe on Windows. Windows API ... (fill in yourself).
pymux
pymux gets close to what you want: https://github.com/jonathanslenders/pymux
Unfortunately it is mostly a CLI tool replacement for tmux and does not have a decent programmatic API yet.
But hacking it up to expose that API is likely the most robust option if you are serious about this.
The README says:
Parts of pymux could become a library, so that any prompt_toolkit application can embed a vt100 terminal. (Imagine a terminal emulator embedded in pyvim.)
If you are on windows you can use win32console module to open a second console for your thread or subprocess output. This is the most simple and easiest way that works if you are on windows.
Here is a sample code:
import win32console
import multiprocessing


def subprocess(queue):
    """Worker: detach into its own console window and echo whatever arrives on *queue*."""
    win32console.FreeConsole()  # Frees subprocess from using main console
    win32console.AllocConsole()  # Creates new console; all output of this process goes there
    while True:
        # prints any output produced by main script passed to subprocess using queue
        print(queue.get())


if __name__ == "__main__":
    # The __main__ guard is required on Windows: multiprocessing re-imports
    # this module in the child process, and an unguarded Process(...).start()
    # would spawn children recursively.
    queue = multiprocessing.Queue()
    # Fixed: the keyword argument is `target`, not `Target` — the original
    # raised TypeError: Process() got an unexpected keyword argument 'Target'.
    multiprocessing.Process(target=subprocess, args=[queue]).start()
    while True:
        print("Hello World")
        # and whatever else you want to do in ur main process
You can also do this with threading. You have to use the queue module if you want the queue functionality, as the threading module doesn't have one.
Here is the win32console module documentation
I used jfs' response. Here is my embellishment/theft of jfs response.
This is tailored to run on Win10 and also handles Unicode:
# https://stackoverflow.com/questions/19479504/how-can-i-open-two-consoles-from-a-single-script
import sys, time, os, locale
from subprocess import Popen, PIPE, CREATE_NEW_CONSOLE


class console(Popen):
    """A Popen subclass that opens a new Windows console and lets you write lines to it."""

    NumConsoles = 0  # running count, used to give untitled consoles a default title

    def __init__(self, color=None, title=None):
        console.NumConsoles += 1
        # Build the Python program the child console will run.
        cmd = "import sys, os, locale"
        # Optional console colour, e.g. "c0" (see `help color` in cmd.exe).
        cmd += "\nos.system(\'color " + color + "\')" if color is not None else ""
        title = title if title is not None else "console #" + str(console.NumConsoles)
        cmd += "\nos.system(\"title " + title + "\")"
        # poor man's `cat`: copy stdin to stdout, re-encoding for the console.
        # Fixed: the embedded child program had lost its indentation in the
        # paste, which would make it a SyntaxError in the child interpreter.
        cmd += """
print(sys.stdout.encoding, locale.getpreferredencoding() )
endcoding = locale.getpreferredencoding()
for line in sys.stdin:
    sys.stdout.buffer.write(line.encode(endcoding))
    sys.stdout.flush()
"""
        cmd = sys.executable, "-c", cmd
        # print(cmd, end="", flush=True)
        super().__init__(cmd, stdin=PIPE, bufsize=1, universal_newlines=True,
                         creationflags=CREATE_NEW_CONSOLE, encoding='utf-8')

    def write(self, msg):
        """Send one line of text to the child console."""
        self.stdin.write(msg + "\n")


if __name__ == "__main__":
    myConsole = console(color="c0", title="test error console")
    myConsole.write("Thank you jfs. Cool explanation")
    NoTitle = console()
    NoTitle.write("default color and title! This answer uses Windows 10")
    NoTitle.write(u"♥♥♥♥♥♥♥♥")
    NoTitle.write("♥")
    time.sleep(5)
    myConsole.terminate()
    NoTitle.write("some more text. Run this at the python console.")
    time.sleep(4)
    NoTitle.terminate()
    time.sleep(5)
Do you know about screen/tmux?
How about tmuxp? For example, you can try to run cat in split panes and use "sendkeys" to send output (but dig the docs, may be there is even easier ways to achieve this).
As a side bonus this will work in the text console or GUI.

python-daemon and logging: set logging level interactively

I have a python-daemon process that logs to a file via a ThreadedTCPServer (inspired by the cookbook example: https://docs.python.org/2/howto/logging-cookbook.html#sending-and-receiving-logging-events-across-a-network, as I will have many such processes writing to the same file). I am controlling the spawning of the daemon process using subprocess.Popen from an ipython console, and this is how the application will be run. I am able to successfully write to the log file from both the main ipython process, as well as the daemon process, but I am unable to change the level of both by just simply setting the level of the root logger in ipython. Is this something that should be possible? Or will it require custom functionality to set the logging.level of the daemon separately?
Edit: As requested, here is an attempt to provide a pseudo-code example of what I am trying to achieve. I hope that this is a sufficient description.
daemon_script.py
import logging
import daemon
from other_module import function_to_run_as_daemon


class Daemon(object):
    """Wrap function_to_run_as_daemon in a daemonised process with logging.

    Renamed from `daemon` so the class no longer shadows the imported
    python-daemon module (the original `with daemon.daemonContext(...)`
    resolved `daemon` to the class being defined, not the module).
    """

    def __init__(self):
        self.daemon_name = __name__
        logging.basicConfig()  # <--- required, or I don't get any log messages
        self.logger = logging.getLogger(self.daemon_name)
        self.logger.debug( "Created logger successfully" )

    def run(self):
        # Fixed: the context-manager line was missing its colon, and the
        # python-daemon class is DaemonContext (capital D).  files_preserve
        # must reference the root logger's handler: basicConfig() attaches
        # its handler to the root logger, so self.logger.handlers is empty
        # and self.logger.handlers[0] raised IndexError.
        with daemon.DaemonContext(files_preserve=[logging.getLogger().handlers[0].stream]):
            self.logger.debug( "Daemonised successfully - about to enter function" )
            function_to_run_as_daemon()


if __name__ == "__main__":
    d = Daemon()
    d.run()
Then in ipython i would run something like
>>> import logging
>>> rootlogger = logging.getLogger()
>>> rootlogger.info( "test" )
INFO:root:"test"
>>> subprocess.Popen( ["python" , "daemon_script.py"] )
DEBUG:__main__:"Created logger successfully"
DEBUG:__main__:"Daemonised successfully - about to enter function"
# now i'm finished debugging and testing, i want to reduce the level for all the loggers by changing the level of the handler
# Note that I also tried changing the level of the root handler, but saw no change
>>> rootlogger.handlers[0].setLevel(logging.INFO)
>>> rootlogger.info( "test" )
INFO:root:"test"
>>> print( rootlogger.debug("test") )
None
>>> subprocess.Popen( ["python" , "daemon_script.py"] )
DEBUG:__main__:"Created logger successfully"
DEBUG:__main__:"Daemonised successfully - about to enter function"
I think that I may not be approaching this correctly, but, its not clear to me what would work better. Any advice would be appreciated.
The logger you create in your daemon won't be the same as the logger you made in ipython. You could test this to be sure, by just printing out both logger objects themselves, which will show you their memory addresses.
I think a better pattern would be be that you pass if you want to be in "debug" mode or not, when you run the daemon. In other words, call popen like this:
subprocess.Popen( ["python" , "daemon_script.py", "debug"] )
It's up to you, you could pass a string meaning "debug mode is on" as above, or you could pass the log level constant that means "debug", e.g.:
subprocess.Popen( ["python" , "daemon_script.py", "10"] )
(https://docs.python.org/2/library/logging.html#levels)
Then in the daemon's init function use argv for example, to get that argument and use it:
...
import sys
def __init__(self):
self.daemon_name = __name__
logging.basicConfig() # <--- required, or I don't get any log messages
log_level = int(sys.argv[1]) # Probably don't actually just blindly convert it without error handling
self.logger = logging.getLogger(self.daemon_name)
self.logger.setLevel(log_level)
...

How & where to best retrieve sudo password via a native GUI on a macOS Python-based app - (while maintaining an interactive output stream (stdout))

Ok, so the situation is this: I am building a macOS GUI App using Python and wx (wxphoenix). The user can use the GUI (say: script1) to launch a file-deletion process (contained in script2). In order to run successfully script2 needs to run with sudo rights.
script2 will iterate over a long list of files and delete them. But I need it to communicate with the GUI contained in script1 after each round so that script1 can update the progress bar.
In its absolute most basic form my current working setup looks like this:
Script1:
import io
from threading import Thread
import subprocess
import wx
# a whole lot of wx GUI stuff
def get_password():
"""Retrieve user password via a GUI"""
# A wx solution using wx.PasswordEntryDialog()
# Store password in a variable
# NOTE(review): placeholder — `variable` is never assigned in this stub; the
# real implementation must set it from the dialog result before returning.
return variable
class run_script_with_sudo(Thread):
    """Launch a script with administrator privileges"""

    def __init__(self, path_to_script, wx_pubsub_sendmessage):
        """Set variables to self"""
        self.path = path_to_script
        self.sender = wx_pubsub_sendmessage
        self.password = get_password()
        Thread.__init__(self)
        self.start()

    def run(self):
        """Run thread"""
        # Feed the password to `sudo -S` through a pipe so it never appears
        # on a command line.  Fixed: the original referenced the bare name
        # `password` (NameError) instead of self.password.
        prepare_script = subprocess.Popen(["echo", self.password], stdout=subprocess.PIPE)
        prepare_script.wait()
        launch_script = subprocess.Popen(
            ['sudo', '-S', '/usr/local/bin/python3.6', '-u', self.path],
            stdin=prepare_script.stdout, stdout=subprocess.PIPE)
        # Stream the child's stdout line by line so the GUI updates live.
        for line in io.TextIOWrapper(launch_script.stdout, encoding="utf-8"):
            print("Received line: ", line.rstrip())
            # Tell progressbar to add another step:
            wx.CallAfter(self.sender, "update", msg="")
Script2:
import time

# This is a test setup, just a very simple loop that produces an output.
for i in range(25):
    time.sleep(1)
    print(i)
The above setup works in that script1 receives the output of script2 in real-time and acts on it. (So in the given example: after each second script1 adds another step to the progress bar until it reaches 25 steps).
What I want to achieve: not storing the password in a variable, and using macOS's native GUI to retrieve the password.
However when I change:
prepare_script = subprocess.Popen(["echo", password], stdout=subprocess.PIPE)
prepare_script.wait()
launch_script = subprocess.Popen(['sudo', '-S', '/usr/local/bin/python3.6', '-u', self.path], stdin=prepare_script.stdout, stdout=subprocess.PIPE)
for line in io.TextIOWrapper(launch_script.stdout, encoding="utf-8"):
print("Received line: ", line.rstrip())
# Tell progressbar to add another step:
wx.CallAfter(self.sender, "update", msg="")
Into:
command = r"""/usr/bin/osascript -e 'do shell script "/usr/local/bin/python3.6 -u """ + self.path + """ with prompt "Sart Deletion Process " with administrator privileges'"""
command_list = shlex.split(command)
launch_script = subprocess.Popen(command_list, stdout=subprocess.PIPE)
for line in io.TextIOWrapper(launch_script.stdout, encoding="utf-8"):
print("Received line: ", line.rstrip())
# Tell progressbar to add another step:
wx.CallAfter(self.sender, "update", msg="")
It stops working because osascript apparently runs in a non-interactive shell. This means script2 doesn't send any output until it is fully finished, causing the progress bar in script1 to stall.
My question thus becomes: How can I make sure to use macOS native GUI to ask for the sudo password, thus preventing having to store it in a variable, while still maintaining the possibility to catch the stdout from the privileged script in an interactive / real-time stream.
Hope that makes sense.
Would appreciate any insights!
My question thus becomes: How can I make sure to use macOS native GUI
to ask for the sudo password, thus preventing having to store it in a
variable, while still maintaining the possibility to catch the stdout
from the privileged script in an interactive / real-time stream.
I have found a solution myself, using a named pipe (os.mkfifo()).
That way, you can have 2 Python scripts communicate with each other while 1 of them is launched with privileged rights via osascript (meaning: you get a native GUI window that asks for the user's sudo password).
Working solution:
mainscript.py
import os
from pathlib import Path
import shlex
import subprocess
import sys
from threading import Thread
import time
class LaunchDeletionProcess(Thread):
    """Run the privileged deletion script via osascript in its own thread."""

    def __init__(self):
        Thread.__init__(self)

    def run(self):
        # osascript shows the native password prompt and runs the script
        # with administrator privileges.
        launch_command = r"""/usr/bin/osascript -e 'do shell script "/usr/local/bin/python3.6 -u /path/to/priviliged_script.py" with prompt "Sart Deletion Process " with administrator privileges'"""
        split_command = shlex.split(launch_command)
        print("Thread 1 started")
        privileged_proc = subprocess.Popen(split_command)
        privileged_proc.wait()
        print("Thread1 Finished")
class ReadStatus(Thread):
    """Poll the named pipe ~/p1 and report progress from the privileged script."""

    def __init__(self):
        Thread.__init__(self)

    def run(self):
        # Wait until the privileged script has created the fifo.
        while not os.path.exists(os.path.expanduser("~/p1")):
            time.sleep(0.1)
        print("Thread 2 started")
        self.wfPath = os.path.expanduser("~/p1")
        rp = open(self.wfPath, 'r')
        response = rp.read()
        rp.close()  # fixed: this handle was previously leaked
        self.try_pipe(response)

    def try_pipe(self, response):
        """Read one message from the fifo; recurse until the writer says "end"."""
        rp = open(self.wfPath, 'r')
        response = rp.read()
        print("Receiving response: ", response)
        rp.close()
        # Fixed: the termination check compared against self.nr_of_steps,
        # which is never defined anywhere (AttributeError on first call).
        # The writer side (priviliged_script.py) signals completion by
        # writing the literal string "end".
        if response == "end":
            print("Got to end")
            os.remove(os.path.expanduser("~/p1"))
        else:
            time.sleep(1)
            self.try_pipe(response)
if __name__ == "__main__":
thread1 = LaunchDeletionProcess()
thread2 = ReadStatus()
thread1.start()
thread2.start()
priviliged_script.py
import os
import time
import random

# Named pipe shared with mainscript.py; created here, read by ReadStatus.
wfPath = os.path.expanduser("~/p1")
try:
    os.mkfifo(wfPath)
except OSError:
    print("error")
    pass

result = 10
nr = 0
# Write one random number per second; each open/write/close pairs up with
# one read on the GUI side of the fifo.
while nr < result:
    random_nr = random.random()
    wp = open(wfPath, 'w')
    print("writing new number: ", random_nr)
    wp.write("Number: " + str(random_nr))
    wp.close()
    time.sleep(1)
    nr += 1

# Signal completion to the reader.
wp = open(wfPath, 'w')
wp.write("end")
wp.close()

How do i write the output to a file and to the stdout using pexpect module

I'm a newbie to python and have been reading and surfing the net to accomplish my task.
I'm writing a function which will do a ssh to my device, execute few commands and display the result both in terminal and into a log file.
i have written something like this:
# Question code (Python 2).  Keeps handles to both the real stdout and a log
# file, intending to mirror output to both.
class logger(object):
def __init__(self, filename="Default.log"):
print 'Inside Logger Class'
# Keep a reference to the real stdout so callers can still reach it.
self.terminal = sys.stdout
# NOTE(review): opened in append mode and never closed — relies on
# interpreter exit to flush; also the class defines no write(), so it is
# not a complete stream replacement by itself.
self.log = open(filename, "a")
# Question code (Python 2, uses third-party pexpect).
class simpleTelnet(logger):
def __init__(self):
print 'Inside simpleTelnt Constructor'
logger.__init__(self,"myfilename.txt")
# NOTE(review): bug — this *assigns a string to* the file object's write
# attribute instead of calling the method; it should be
# self.log.write('Writing into the log file').  This is the error the
# answer below explains.
self.log.write = 'Writing into the log file'
def telnetSession(self):
# Spawn an ssh session via pexpect and mirror its output to stdout.
p=pexpect.spawn('ssh admin#<ip address>')
p.logfile = sys.stdout
p.expect('Password:')
p.sendline('password')
time.sleep(2)
p.sendline('show version | no-more')
# NOTE(review): missing the object — presumably meant
# p.expect(pexpect.EOF, timeout=None); as written this raises NameError.
expect(pexpect.EOF, timeout = None)
# NOTE(review): in pexpect, `before` is an attribute, not a callable —
# confirm against the pexpect docs.
out = p.before()
self.log.write(p.logfile)
p.close()
return out
if __name__ == "__main__":
output = simpleTelnet()
cmd = output.telnetSession()
Here I'm trying to log in to a device and print the output to stdout while also writing it to a file. I'm able to print to stdout and log to a file, but after executing the command, although I close the spawn class with p.close(), it does not close and end the script execution. The program stays there forever. How do I close the program after executing these commands?
.write is a method, not an attribute, so you should do `variable.write("anything you want")` and not `variable.write = "anything you want"`. Python wouldn't be able to change the content if you do not call the function.
So instead of doing:
class simpleTelnet(logger):
def __init__(self):
print 'Inside simpleTelnt Constructor'
logger.__init__(self,"myfilename.txt")
self.log.write = 'Writing into the log file
You'd do:
class simpleTelnet(logger):
def __init__(self):
print 'Inside simpleTelnt Constructor'
logger.__init__(self,"myfilename.txt")
self.log.write('Writing into the log file')
As the other user pointed out, you have the same line of code but with the correct syntax up there 6 lines above. You have to revise your code before posting a question. Try to do so next time.

Running Python-script in thread and redirecting std.out/std.err to wx.TextCtrl in GUI

I'm trying to write a GUI that reads in settings for a python-script, then generates the script and runs it. The script can take dozens of minutes to run so in order to not block the GUI and frustrate the user I'm running it in a separate thread. Before I did this I used a separate class to redirect the std.out and std.err of the program to a TextCtrl. This worked fine except for the GUI getting blocked during execution.
Running the script from the thread with the redirection-class still blocks the GUI. In order not to block the GUI I need to turn the redirection off. All std.out/err from both the script and the gui then goes into the console.
Here is the class that redirects and how I call it.
# For redirecting stdout/stderr to txtctrl.
class RedirectText(object):
    """File-like shim: anything written to it lands in a wx text control."""

    def __init__(self, aWxTextCtrl):
        # The wx.TextCtrl that will display the redirected output.
        self.out = aWxTextCtrl

    def write(self, string):
        # Satisfies the stream protocol expected of sys.stdout/sys.stderr.
        self.out.WriteText(string)
self.redir=RedirectText(self.bottom_text)
sys.stdout=self.redir
sys.stderr=self.redir
sys.stdin=self.redir
I've tried using some kind of a communication class from the thread to the GUI without success. That is, the GUI still gets blocked.
Does anyone have some hints or a solution for this problem, that is to get the stdout/err from the script to the GUI without blocking the GUI?
Yeah. From the thread, use wx.CallAfter to send the text to the GUI to a thread-safe way. Then it can take the text and display it. Another way to do it would be to use subprocess and communicate with that. There's an example of that here:
http://www.blog.pythonlibrary.org/2010/06/05/python-running-ping-traceroute-and-more/
There are also some methods listed in the comments of this article:
http://www.blog.pythonlibrary.org/2009/01/01/wxpython-redirecting-stdout-stderr/
Unfortunately, my commenting system at that time didn't do a good job with indentation.
Another solution I have used with success would be to use python logging instead of stdout/stderr. In order to do that, you write a subclass that extends logging.Handler, to customize the font and the text color to be presented in a wx.TextCtrl in your wx application:
import logging
from logging import Handler


class WxHandler(Handler):
    """Logging handler that writes colour-coded records to a wx.TextCtrl."""

    def __init__(self, logCtrl):
        """
        Initialize the handler.

        logCtrl = an instance of wx.TextCtrl
        """
        self.logCtrl = logCtrl
        Handler.__init__(self)

    def flush(self):
        # Nothing is buffered; records are shown as soon as they are emitted.
        pass

    def emit(self, record):
        """Format *record*, append it to the text control, and colour it by level."""
        try:
            start = self.logCtrl.GetLastPosition()
            text = self.format(record)
            self.logCtrl.WriteText(text)
            self.logCtrl.WriteText('\r\n')
            font = wx.Font(10, wx.MODERN, wx.NORMAL, wx.NORMAL, False, u'Arial', wx.FONTENCODING_ISO8859_1)
            # Pick a text colour matching the record's severity.
            if record.levelno == logging.INFO:
                colour = wx.Colour(0, 0, 205)
            elif record.levelno == logging.WARN:
                colour = wx.Colour(250, 128, 114)
            elif record.levelno >= logging.ERROR:
                colour = wx.Colour(220, 20, 60)
            else:
                colour = wx.Colour(0, 0, 0)
            self.logCtrl.SetStyle(start, start + len(text), wx.TextAttr(colour, wx.NullColour, font))
        except:
            self.handleError(record)
In order to configure the logger:
def configureWxLogger(logCtrl, loggingLevel):
    """
    Wx Logger config
    """
    root = logging.getLogger()
    root.setLevel(loggingLevel)
    handler = WxHandler(logCtrl)
    fmt = logging.Formatter("%(asctime)-20s - %(levelname)-8s - %(message)s")
    fmt.datefmt = '%d/%m/%Y-%H:%M:%S'
    handler.setFormatter(fmt)
    root.addHandler(handler)
    return root
And, finally, to bind the text control to the log output:
self.logCtrl = wx.TextCtrl(self, -1, "", size=(600, 200), style=wx.TE_MULTILINE|wx.TE_RICH2)
wxLoggingHelper.configureWxLogger(self.logCtrl, logging.DEBUG)

Categories