Peculiar error in ntpath.py - Python

I am trying to write a wrapper around the VLC player on Windows 7 so that it can load the next and previous files in a folder with a keystroke. I take a file path as an argument, make an instance of the vlc class, and use pyHook to scan for keys; when specific keystrokes are detected I call the play_next and play_prev methods on the instance. The methods work by killing the last process and spawning a new VLC with the next file found by the get_new_file method. It works the first couple of times and then gives this peculiar error.
None
None
Traceback (most recent call last):
  File "C:\Python27\pyHook\HookManager.py", line 351, in KeyboardSwitch
    return func(event)
  File "filestuff.py", line 64, in kbwrap
    kbeventhandler(event,instance)
  File "filestuff.py", line 11, in kbeventhandler
    instance.play_prev()
  File "filestuff.py", line 34, in play_prev
    f=self.get_new_file(-1)
  File "filestuff.py", line 40, in get_new_file
    dirname= os.path.dirname(self.fn)
  File "C:\Python27\lib\ntpath.py", line 205, in dirname
    return split(p)[0]
  File "C:\Python27\lib\ntpath.py", line 178, in split
    while head2 and head2[-1] in '/\\':
TypeError: an integer is required
here is the code:
import os
import sys
import pythoncom, pyHook
import win32api
import subprocess
import ctypes

def kbeventhandler(event,instance):
    if event.Key=='Home':
        instance.play_prev()
    if event.Key=='End':
        instance.play_next()
    return True

class vlc(object):
    def __init__(self,filepath,vlcp):
        self.fn=filepath
        self.vlcpath=vlcp
        self.process = subprocess.Popen([self.vlcpath, self.fn])
    def kill(self):
        PROCESS_TERMINATE = 1
        handle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, False, self.process.pid)
        ctypes.windll.kernel32.TerminateProcess(handle, -1)
        ctypes.windll.kernel32.CloseHandle(handle)
        print self.process.poll()
    def play_next(self):
        self.kill()
        f=self.get_new_file(1)
        self.process = subprocess.Popen([self.vlcpath, f])
        self.fn=f
    def play_prev(self):
        self.kill()
        f=self.get_new_file(-1)
        self.process = subprocess.Popen([self.vlcpath, f])
        self.fn=f
    def get_new_file(self,switch):
        dirname= os.path.dirname(self.fn)
        supplist=['.mkv','.flv','.avi','.mpg','.wmv']
        files = [os.path.join(dirname,f) for f in os.listdir(dirname) if (os.path.isfile(os.path.join(dirname,f)) and os.path.splitext(f)[-1] in supplist)]
        files.sort()
        try: currentindex=files.index(self.fn)
        except: currentindex=0
        i=0
        if switch==1:
            if currentindex<(len(files)-1):i=currentindex+1
        else:
            if currentindex>0:i=currentindex-1
        return files[i]

def main():
    vlcpath='vlc'
    if os.name=='nt': vlcpath='C:/Program Files (x86)/VideoLAN/VLC/vlc.exe'
    fn='H:\\Anime\\needless\\Needless_[E-D]\\[Exiled-Destiny]_Needless_Ep11v2_(04B16479).mkv'
    if len(sys.argv)>1:
        fn=sys.argv[1] #use argument if available or else use default file
    instance=vlc(fn,vlcpath)
    hm = pyHook.HookManager()
    def kbwrap(event):
        kbeventhandler(event,instance)
    hm.KeyDown = kbwrap
    hm.HookKeyboard()
    pythoncom.PumpMessages()

if __name__ == '__main__':
    main()
here too: http://pastebin.com/rh82XGzd

The problem was that in main I set hm.KeyDown = kbwrap, and kbwrap called the actual event handler kbeventhandler but didn't return its value, so the hook callback got None:

def kbwrap(event):
    return kbeventhandler(event,flag)
hm.KeyDown = kbwrap

I also offloaded the VLC work to a different thread, as pyHook wasn't playing nice with subprocess.
final working code:
import os
import sys
import pythoncom, pyHook
import win32api
import subprocess
import ctypes
import threading
from multiprocessing import *

class vlcThread(threading.Thread):
    def __init__(self,filepath,vlcp,fl):
        threading.Thread.__init__(self)
        self.fn,self.vlcpath,self.flag=filepath,vlcp,fl
        self.daemon=True
        self.start() # invoke the run method
    def run(self):
        vlcinstance=vlc(self.fn,self.vlcpath)
        while True:
            if(self.flag.value==1):
                vlcinstance.play_next()
                self.flag.value=0
            if(self.flag.value==-1):
                vlcinstance.play_prev()
                self.flag.value=0

def kbeventhandler(event,flag):
    if event.Key=='Home':
        flag.value =-1
        return False
    if event.Key=='End':
        flag.value =1
        return False
    return True

class vlc(object):
    def __init__(self,filepath,vlcp):
        self.fn=filepath
        self.vlcpath=vlcp
        self.process = subprocess.Popen([self.vlcpath,self.fn],close_fds=True)
    def kill(self):
        p, self.process = self.process, None
        if p is not None and p.poll() is None:
            p.kill()
            p.wait()
    def play_next(self):
        self.kill()
        f=self.get_new_file(1)
        self.process = subprocess.Popen([self.vlcpath,f],close_fds=True)
        self.fn=f
    def play_prev(self):
        self.kill()
        f=self.get_new_file(-1)
        self.process = subprocess.Popen([self.vlcpath, f],close_fds=True)
        self.fn=f
    def get_new_file(self,switch):
        dirname= os.path.dirname(self.fn)
        # extensions need the leading dot to match what os.path.splitext returns
        supplist=['.mkv','.flv','.avi','.mpg','.wmv','.ogm','.mp4']
        files = [os.path.join(dirname,f) for f in os.listdir(dirname) if (os.path.isfile(os.path.join(dirname,f)) and os.path.splitext(f)[-1] in supplist)]
        files.sort()
        try: currentindex=files.index(self.fn)
        except: currentindex=0
        i=0
        if switch==1:
            if currentindex<(len(files)-1):i=currentindex+1
        else:
            if currentindex>0:i=currentindex-1
        return files[i]

def main():
    vlcpath='vlc'
    flag=Value('i')
    flag.value=0
    if os.name=='nt': vlcpath='C:/Program Files (x86)/VideoLAN/VLC/vlc.exe'
    fn='H:\\Anime\\needless\\Needless_[E-D]\\[Exiled-Destiny]_Needless_Ep11v2_(04B16479).mkv'
    if len(sys.argv)>1:
        fn=sys.argv[1] #use argument if available or else use default file
    t=vlcThread(fn,vlcpath,flag)
    hm = pyHook.HookManager()
    def kbwrap(event):
        return kbeventhandler(event,flag)
    hm.KeyDown = kbwrap
    hm.HookKeyboard()
    pythoncom.PumpMessages()

if __name__ == '__main__':
    main()

Related

Capture real-time `stdout` and `stderr` when running a function in a process - Python

I have a Python function and want to run it as a separate process with the multiprocessing package.
def run(ctx: Context):
    print("hello world!")
    return ctx
Afterward, I run it as a separate process with the following script:
import multiprocessing

p = multiprocessing.Process(target=run, args=(ctx, ))
p.start()
p.join()
Now, I need to capture the live stdout and stderr of the above process. Is there any way to do it, something like:
import subprocess

proc = subprocess.Popen(['python','fake_utility.py'],stdout=subprocess.PIPE)
while True:
    line = proc.stdout.readline()
    if not line:
        break
But I need to pass a function, not run a command with Popen. Do you know how I can read stdout when I run my function in a separate process?
My approach would be to create a custom context manager that temporarily replaces sys.stdout and sys.stderr with io.StringIO() instances to capture the output and return it. For this you need to make the target of your Process a new function that sets up the context manager and returns the results, for which a multiprocessing.Queue is used (this, by the way, would be needed anyway if you expect run to return its result back to the main process):
from multiprocessing import Process, Queue
from io import StringIO
import sys

class CaptureOutput:
    def __enter__(self):
        self._stdout_output = ''
        self._stderr_output = ''

        self._stdout = sys.stdout
        sys.stdout = StringIO()

        self._stderr = sys.stderr
        sys.stderr = StringIO()

        return self

    def __exit__(self, *args):
        self._stdout_output = sys.stdout.getvalue()
        sys.stdout = self._stdout

        self._stderr_output = sys.stderr.getvalue()
        sys.stderr = self._stderr

    def get_stdout(self):
        return self._stdout_output

    def get_stderr(self):
        return self._stderr_output

def run(ctx):
    print("hello world!")
    print("It works!", file=sys.stderr)
    raise Exception('Oh oh!') # Comment out to have a successful completion
    return ctx

def worker(ctx, queue):
    import traceback

    with CaptureOutput() as capturer:
        try:
            result = run(ctx)
        except Exception as e:
            result = e
            print(traceback.format_exc(), file=sys.stderr)

    queue.put((result, capturer.get_stdout(), capturer.get_stderr()))

if __name__ == '__main__':
    queue = Queue()
    ctx = None # for demo purposes
    p = Process(target=worker, args=(ctx, queue))
    p.start()
    # Must do this call before call to join:
    result, stdout_output, stderr_output = queue.get()
    p.join()
    print('stdout:', stdout_output)
    print('stderr:', stderr_output)
Prints:
stdout: hello world!
stderr: It works!
Traceback (most recent call last):
  File "C:\Booboo\test\test.py", line 44, in worker
    result = run(ctx)
  File "C:\Booboo\test\test.py", line 36, in run
    raise Exception('Oh oh!') # Comment out to have a successful completion
Exception: Oh oh!
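With this approach the captured text only becomes available after the worker has finished. If output is needed while the function is still running, one hedged variation of the same idea is to replace sys.stdout and sys.stderr with a small file-like object that pushes every write onto the queue immediately; QueueWriter and the 'done' sentinel below are illustrative names, not part of the answer above.

from multiprocessing import Process, Queue
import sys

class QueueWriter:
    """Minimal file-like object: every write is forwarded to a Queue right away."""
    def __init__(self, queue, tag):
        self.queue = queue
        self.tag = tag            # e.g. 'stdout' or 'stderr'
    def write(self, text):
        if text:
            self.queue.put((self.tag, text))
    def flush(self):              # print() may call flush(); nothing to do here
        pass

def worker(ctx, queue):
    sys.stdout = QueueWriter(queue, 'stdout')
    sys.stderr = QueueWriter(queue, 'stderr')
    print("hello world!")
    queue.put(('done', ctx))      # sentinel so the parent knows when to stop

if __name__ == '__main__':
    queue = Queue()
    p = Process(target=worker, args=(None, queue))
    p.start()
    while True:
        tag, payload = queue.get()    # arrives as soon as the child writes
        if tag == 'done':
            break
        print(tag, payload, end='')
    p.join()

The parent drains the queue in a loop and prints each chunk as soon as the child produces it, instead of waiting for one final tuple.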

Send file pointer to python thread and update file pointer

I have a Python program with a thread, and the thread should write into a file. I spawn the thread from the main program. On a new-day trigger I change the file pointer in the main program, and I want the thread to pick up the new file and write its data there.
I have code that uses a global variable to do this task, but is there a better way of doing it?
#!/usr/bin/env python
import sys
import threading
import time

filePtr = None

def fileWriteTh():
    global filePtr
    time.sleep(2)
    filePtr.write("from the thrread this should in file 2")

def main():
    global filePtr
    filePtr = open("test1.txt","ab")
    fileThread = threading.Thread(target=fileWriteTh)
    fileThread.start()

    if new_day_trigger:
        filePtr.close()
        filePtr = open("test2.txt","ab")

    fileThread.join()

if __name__ == "__main__":
    main()
This is the new code I have written:
#!/usr/bin/env python
import sys
import threading
import time

class SendPacket(object):
    fileDesc = None

    def __init__(self, fd):
        super(SendPacket, self).__init__()
        SendPacket.fileDesc = fd

    def printFromInstance(self,var):
        print var
        SendPacket.fileDesc.write(var)
        time.sleep(3)
        print var
        SendPacket.fileDesc.write(var)

    def startabc(self, someVar):
        self.printFromInstance(someVar)

    @classmethod
    def printVar(cls, printStr):
        print printStr
        cls.fileDesc.write(printStr)

    @classmethod
    def changeClsFile(cls, newFd):
        cls.fileDesc = newFd

def main():
    filePtr = open("test1.txt","ab")
    sendPack_inst = SendPacket(filePtr)

    fileThread = threading.Thread(target=sendPack_inst.startabc, args=("test1",))
    fileThread.start()

    time.sleep(2)
    filePtr.close()
    filePtr = open("test2.txt","ab")
    SendPacket.changeClsFile(filePtr)

    fileThread.join()
    filePtr.close()

if __name__ == "__main__":
    main()
Like this:
#!/usr/bin/env python
import sys
import thread
import time

class _fileACT :
    def __init__(self):
        self.trigger = 0
        self.flag = True
        self.msg = ""
        self.files = (open("test1.txt","ab"),open("test2.txt","ab"))

    def run(self,pssrg):
        while self.flag :
            if self.msg != "" :
                self.files[self.trigger].write(self.msg)
                self.msg = ""

    def test(self,pssrg):
        for i in range(20):
            time.sleep(1)
            if i %2 != 0 :
                self.trigger = 0
            elif i %2 != 1:
                self.trigger = 1
            self.msg = "%0.3d test-1,asdasdasd\n"%i
            time.sleep(0.5)
        print "wait..."
        self.flag = False
        for e in self.files : e.close()
        print "can exit !"

if __name__ == "__main__":
    fileACT = _fileACT()
    thread.start_new_thread(fileACT.run,(None,))
    thread.start_new_thread(fileACT.test,(None,))
    # keep the main thread alive until the demo finishes; with
    # thread.start_new_thread both threads die as soon as the main thread exits
    while fileACT.flag:
        time.sleep(1)
The class holds three pieces of state: the trigger that selects the current file, a flag that stops the writer loop, and the pending message. With only two files a False/True trigger would be sufficient (and of course you can use an index for more files). The test method is written into the class so the main program is not frozen while the demo runs. File selection is done with trigger: both files stay open, and when the trigger changes, the next write simply goes to the other handle.
The important point is that the writer loop itself contains no time delay; the delays are applied on the side that sets the trigger and the message, never in the writer loop. The test method also shows how the instance is driven from outside the class. I hope it helps.
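As an alternative sketch (an editorial addition, not part of the answer above, and assuming only the main thread ever swaps the handle): wrap the file object together with a threading.Lock so that a write and a swap can never interleave.

import threading
import time

class SwappableFile(object):
    """Holds the current file object; write() and swap() are serialized by a lock."""
    def __init__(self, path):
        self._lock = threading.Lock()
        self._fp = open(path, "a")

    def write(self, data):
        with self._lock:
            self._fp.write(data)

    def swap(self, new_path):
        # close the old file and switch to the new one atomically w.r.t. write()
        with self._lock:
            self._fp.close()
            self._fp = open(new_path, "a")

    def close(self):
        with self._lock:
            self._fp.close()

def writer(target):
    for i in range(5):
        target.write("line %d\n" % i)
        time.sleep(1)

if __name__ == "__main__":
    target = SwappableFile("test1.txt")
    t = threading.Thread(target=writer, args=(target,))
    t.start()
    time.sleep(2)
    target.swap("test2.txt")   # e.g. on the new-day trigger
    t.join()
    target.close()

The writer thread never touches file names; it only calls write(), so the main program is free to decide when and where the switch happens.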

Python: 'object is not callable' error with 2 threads

When I run the code below, I get an exception:
# System
import time
import logging
import sys
import os
import threading
# cv2 and helper:
import cv2

class inic_thread(threading.Thread):
    def __init__(self, threadID, name, counter):
        threading.Thread.__init__(self)
        self.threadID = threadID
        self.name = name
        self.counter = counter
    def run(self):
        print "Starting " + self.name
        if self.counter == 1: capture_continuos()
        elif self.counter == 2: face_search()

def capture_continuos():
    #os.system('python capture_continuos.py')
    while(1):
        print 'a'

def face_search():
    # atributes
    pool = []
    path_pool = './pool/'
    while(1):
        pool_get = os.listdir(path_pool)
        if len(pool_get) > 0:
            #print(str(len(pool_get))+' images in the pool')
            for image in pool_get:
                print(image)
                os.system('python face_search.py -i '+str(image))
        else:
            print('Empty Pool')

try:
    capture_continuos = inic_thread(1, "capture_continuos_1", 1)
    face_search_2 = inic_thread(2, "face_search_2", 2)
    capture_continuos.start()
    face_search_2.start()
except:
    print("Error: unable to start thread")
But it doesn't make sense to me, because one of the threads (face_search) runs normally, while the other one gives this exception:
Starting capture_continuos_1
Exception in thread capture_continuos_1:
Traceback (most recent call last):
  File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner
    self.run()
  File "main.py", line 44, in run
    if self.counter == 1: capture_continuos()
TypeError: 'inic_thread' object is not callable
What am I doing wrong?
I am running this on a Raspberry Pi 3 Model B with Ubuntu MATE 14.04; Python 2.7.12.
At the bottom of your script you redefine the variable capture_continuos, assigning the thread object to it, so by the time run() executes, that name no longer refers to the function.
Also, as was mentioned, to terminate the thread it's better to call os._exit() instead of sys.exit().
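For illustration, a minimal version of that fix is to give the thread object a name that does not shadow the function; capture_thread below is just an example name, and the rest of the script stays unchanged:

try:
    # renamed so it no longer shadows the capture_continuos() function
    capture_thread = inic_thread(1, "capture_continuos_1", 1)
    face_search_2 = inic_thread(2, "face_search_2", 2)
    capture_thread.start()
    face_search_2.start()
except:
    print("Error: unable to start thread")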

Python cmd module - Resume prompt after asynchronous event

I am maintaining an operator terminal based on cmd. The customer asked for alerting behavior, e.g. a message shown onscreen when some asynchronous event occurs. I made a thread that periodically checks for alerts, and when it finds some, it just prints them to stdout.
This seems to work OK, but it doesn't seem very elegant, and it has a problem:
Because cmd doesn't know an alert happened, the message is followed onscreen by nothing: the command prompt is not reprinted, and any user input already typed is left pending.
Is there a better way to do asynchronous alerts during Python cmd? With the method as-is, can I interrupt cmd and get it to redraw its prompt?
I tried, from my thread, to poke a newline into stdin using StringIO, but this is not ideal, and I haven't gotten it to work right.
Example code:
import cmd, sys
import threading, time
import io
import sys

class MyShell(cmd.Cmd):
    intro = '*** Terminal ***\nType help or ? to list commands.\n'
    prompt = '> '
    file = None

    def alert(self):
        time.sleep(5)
        print ('\n\n*** ALERT!\n')
        sys.stdin = io.StringIO("\n")

    def do_bye(self, arg):
        'Stop recording, close the terminal, and exit: BYE'
        print('Exiting.')
        sys.exit(0)
        return True

    def do_async(self, arg):
        'Set a five second timer to pop an alert.'
        threading.Thread(target=self.alert).start()

    def emptyline(self):
        pass

def parse(arg):
    'Convert a series of zero or more numbers to an argument tuple'
    return tuple(map(int, arg.split()))

if __name__ == '__main__':
    MyShell().cmdloop()
I ended up overriding Cmd.cmdloop with my own version, replacing the readline() calls with my own readline that uses non-blocking terminal IO.
Non-blocking terminal IO info here:
Non-Blocking terminal IO
Unfortunately, this opens another can of worms: it is messy and breaks auto-completion and command history. Fortunately, the customer was OK with having to push Enter to redraw the prompt, so I don't need to worry about it anymore.
Incomplete example code showing the non-blocking terminal input approach:
import cmd, sys
import threading, time
import io
import os

if os.name=='nt':
    import msvcrt
    def getAnyKey():
        if msvcrt.kbhit():
            return msvcrt.getch()
        return None
else:
    import sys
    import select
    import tty
    import termios
    import atexit

    def isData():
        return select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], [])

    old_settings = termios.tcgetattr(sys.stdin)

    def restoreSettings():
        global old_settings
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)

    atexit.register(restoreSettings)

    def getAnyKey():
        try:
            if isData():
                return sys.stdin.read(1)
            return None
        except:
            pass
        return None

class MyShell(cmd.Cmd):
    prompt = '> '
    file = None
    realstdin = sys.stdin
    mocking=False
    breakReadLine=False

    def alert(self):
        time.sleep(5)
        print ('\n\n*** ALERT!\n')
        self.breakReadLine=True

    # ----- basic commands -----
    def do_bye(self, arg):
        'Stop recording, close the terminal, and exit: BYE'
        print('Exiting.')
        sys.exit(0)
        return True

    def do_async(self, arg):
        'Set a five second timer to pop an alert.'
        threading.Thread(target=self.alert).start()

    def emptyline(self):
        pass

    def myReadLine(self):
        sys.stdout.flush()
        self.breakReadLine=False
        line=''
        while not self.breakReadLine:
            c=getAnyKey()
            if not c is None:
                c=c.decode("utf-8")
                if c=='\x08' and len(line):
                    line=line[0:-1]
                elif c in ['\r','\n']:
                    print('\n')
                    return line
                else:
                    line+=c
                    print(c,end='')
                    sys.stdout.flush()

    def mycmdloop(self, intro=None):
        """Repeatedly issue a prompt, accept input, parse an initial prefix
        off the received input, and dispatch to action methods, passing them
        the remainder of the line as argument.
        """
        self.preloop()
        if self.use_rawinput and self.completekey:
            try:
                import readline
                self.old_completer = readline.get_completer()
                readline.set_completer(self.complete)
                readline.parse_and_bind(self.completekey+": complete")
            except ImportError:
                pass
        try:
            if intro is not None:
                self.intro = intro
            if self.intro:
                self.stdout.write(str(self.intro)+"\n")
            stop = None
            while not stop:
                if self.cmdqueue:
                    line = self.cmdqueue.pop(0)
                else:
                    if self.use_rawinput:
                        try:
                            print(self.prompt,end='')
                            line = self.myReadLine()#input(self.prompt)
                        except EOFError:
                            line = 'EOF'
                    else:
                        self.stdout.write(self.prompt)
                        self.stdout.flush()
                        line = self.myReadLine()#self.stdin.readline()
                if not line is None:
                    line = line.rstrip('\r\n')
                    line = self.precmd(line)
                    stop = self.onecmd(line)
                    stop = self.postcmd(stop, line)
            self.postloop()
        finally:
            if self.use_rawinput and self.completekey:
                try:
                    import readline
                    readline.set_completer(self.old_completer)
                except ImportError:
                    pass

    def cmdloop_with_keyboard_interrupt(self, intro):
        doQuit = False
        while doQuit != True:
            try:
                if intro!='':
                    cintro=intro
                    intro=''
                    self.mycmdloop(cintro)
                else:
                    self.intro=''
                    self.mycmdloop()
                doQuit = True
            except KeyboardInterrupt:
                sys.stdout.write('\n')

def parse(arg):
    'Convert a series of zero or more numbers to an argument tuple'
    return tuple(map(int, arg.split()))

if __name__ == '__main__':
    #MyShell().cmdloop()
    MyShell().cmdloop_with_keyboard_interrupt('*** Terminal ***\nType help or ? to list commands.\n')

An error with queue.get: 'bool' object has no attribute 'get'

#!/usr/bin/env python
#coding=utf-8
import sys,os,threading
import Queue

keyword = sys.argv[1]
path = sys.argv[2]

class keywordMatch(threading.Thread):
    def __init__(self,queue):
        threading.Thread.__init__(self)
        self.queue = queue
    def run(self):
        while True:
            line = self.queue.get()
            if keyword in line:
                print line
            queue.task_done()

def main():
    concurrent = 100 # Number of threads
    queue = Queue.Queue()
    for i in range(concurrent):
        t = keywordMatch(True)
        t.setDaemon(True)
        t.start()

    allfiles = os.listdir(path)
    for files in allfiles:
        pathfile = os.path.join(path,files)
        fp = open(pathfile)
        lines = fp.readlines()
        for line in lines:
            queue.put(line.strip())
    queue.join()

if __name__ == '__main__':
    main()
This program searches for a keyword in all the files of a directory, but this error occurs:
Exception in thread Thread-100:
Traceback (most recent call last):
  File "/usr/local/Cellar/python/2.7.3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 551, in __bootstrap_inner
    self.run()
  File "du.py", line 17, in run
    line = self.queue.get()
AttributeError: 'bool' object has no attribute 'get'
How can I get rid of the error?
You're instantiating the thread with t = keywordMatch(True), and then in __init__ you're taking this argument and saving it as self.queue - so naturally self.queue is going to be a bool. If you want there to be a Queue instance there, you should pass it in.
In main() you wrote:
t = keywordMatch(True)
The keywordMatch class's __init__ does this:
def __init__(self,queue):
    self.queue = queue
So now self.queue is True! Later, trying to do self.queue.get fails because it isn't a queue at all.
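A minimal sketch of that fix, slotted into the existing main() (note that run() should also call self.queue.task_done() rather than the bare queue.task_done()):

queue = Queue.Queue()
for i in range(concurrent):
    t = keywordMatch(queue)   # pass the Queue object, not True
    t.setDaemon(True)
    t.start()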
