I have written an Nmap TCP port scanner in Python, and everything works fine except that afterwards I am no longer able to see what I'm typing in the terminal.
First things first, the code:
import argparse, nmap, sys
from threading import *

def initParser():
    parser = argparse.ArgumentParser()
    parser.add_argument("tgtHost", help="Specify target host")
    parser.add_argument("tgtPort", help="Specify target port")
    args = parser.parse_args()
    return (args.tgtHost, args.tgtPort.split(","))

def nmapScan(tgtHost, tgtPorts):
    nm = nmap.PortScanner()
    lock = Semaphore(value=1)
    for tgtPort in tgtPorts:
        t = Thread(target=nmapScanThread, args=(tgtHost, tgtPort, lock, nm))
        t.start()

def nmapScanThread(tgtHost, tgtPort, lock, nm):
    nm.scan(tgtHost, tgtPort)
    state = nm[tgtHost]['tcp'][int(tgtPort)]['state']
    lock.acquire()
    print("Port {} is {}".format(tgtPort, state))
    lock.release()

if __name__ == '__main__':
    (tgtHost, tgtPorts) = initParser()
    nmapScan(tgtHost, tgtPorts)
    sys.exit(0)
So, after I run the script I no longer see what I'm typing on the console, but I can still execute my invisible commands. As you can see, I start a thread for each port simply because I am learning about threading right now.
My assumption is that not all threads terminate properly, because everything works fine after I add "t.join()" to the code.
Unfortunately I couldn't find anything about this issue.
Like this:
import argparse, nmap, sys
from threading import *

def initParser():
    parser = argparse.ArgumentParser()
    parser.add_argument("tgtHost", help="Specify target host")
    parser.add_argument("tgtPort", help="Specify target port")
    args = parser.parse_args()
    return (args.tgtHost, args.tgtPort.split(","))

def nmapScan(tgtHost, tgtPorts):
    nm = nmap.PortScanner()
    lock = Semaphore(value=1)
    for tgtPort in tgtPorts:
        t = Thread(target=nmapScanThread, args=(tgtHost, tgtPort, lock, nm))
        t.start()
        t.join()

def nmapScanThread(tgtHost, tgtPort, lock, nm):
    nm.scan(tgtHost, tgtPort)
    state = nm[tgtHost]['tcp'][int(tgtPort)]['state']
    lock.acquire()
    print("Port {} is {}".format(tgtPort, state))
    lock.release()

if __name__ == '__main__':
    (tgtHost, tgtPorts) = initParser()
    nmapScan(tgtHost, tgtPorts)
    sys.exit(0)
Is this the proper way to handle the problem, or did I mess things up a bit?
Additionally, I don't see how join() is useful in this example, because calling it right after start() means each scan finishes before the next one starts, so there is no real difference from the same script without threading.
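For comparison, here is a minimal sketch (not a verified fix for the echo issue, just the usual pattern) that keeps the scans concurrent: collect the threads in a list, start them all, and only then join them, so the main thread waits for every scan before exiting.

def nmapScan(tgtHost, tgtPorts):
    nm = nmap.PortScanner()
    lock = Semaphore(value=1)
    threads = []
    for tgtPort in tgtPorts:
        t = Thread(target=nmapScanThread, args=(tgtHost, tgtPort, lock, nm))
        t.start()
        threads.append(t)
    # join after all threads have been started, so the scans still overlap
    for t in threads:
        t.join()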
Recently I have been working with Pipe and a Raspberry Pi. I am trying to send a signal to my function to kill it, but "pipe.recv" is blocking the function. The message is sent, yet the while loop never gets executed.
from multiprocessing import Process, Pipe
import time
import os
import signal

def start(pipe):
    pipe1 = pipe[1].recv()
    while True:
        print('hello world')
        os.kill(pipe1, signal.SIGTERM)

if __name__ == "__main__":
    conn1 = Pipe()
    a = Process(target=start, args=(conn1,))
    a.start()
    time.sleep(5)
    print("TIMES UP")
    conn1[1].send(a.pid)
You are sending to, and attempting to receive from, the same end of the pipe. Try this instead, where pipe[0] and pipe[1] are named parent and child for readability:
from multiprocessing import Process, Pipe
import time
import os
import signal

def start(child):
    pipe1 = child.recv()
    while True:
        print('hello world')
        os.kill(pipe1, signal.SIGTERM)

if __name__ == "__main__":
    parent, child = Pipe()
    a = Process(target=start, args=(child,))
    a.start()
    time.sleep(5)
    print("TIMES UP")
    parent.send(a.pid)
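For reference, Pipe() returns two Connection objects, and whatever is sent on one end comes out of the other. A minimal sketch (the names are illustrative):

from multiprocessing import Pipe

parent_end, child_end = Pipe()   # duplex by default: both ends can send and receive
parent_end.send("ping")
print(child_end.recv())          # prints 'ping'
child_end.send("pong")
print(parent_end.recv())         # prints 'pong'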
I am just getting started with multiprocessing and Python, and I need some help catching Control-C in my program. The script I am writing reads in a file and then performs some tasks on each line. Before anyone comments on I/O and the advantages/disadvantages of multiprocessing, I am aware :) these tasks lend themselves very well to being multi-threaded.
I have the following code, and from the documentation I would expect it to work; however, it is not catching my keyboard exception! ARRGH... Please help.
Running on Win10, if that makes any difference:
from multiprocessing import cpu_count
from multiprocessing.dummy import Pool as ThreadPool
import argparse
from time import sleep
import signal
import sys

def readfile(file):
    with open(file, 'r') as file:
        data = file.readlines()
        file.close()
    return data

def work(line):
    while(True):
        try:
            print(f"\rgoing to do some work on {line}")
            countdown(5)
        except (KeyboardInterrupt, SystemExit):
            print("Exiting...")
            break

def countdown(time=30):
    sleep(time)

def parseArgs(args):
    if args.verbose:
        verbose = True
        print("[+] Verbosity turned on")
    else:
        verbose = False
    if args.threads:
        threads = args.threads
    else:
        threads = cpu_count()
    print(f'[+] Using {threads} threads')
    return threads, verbose, args.file

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--file", required=True, help="Insert the file you plan on parsing")
    parser.add_argument("-t", "--threads", help="Number of threads, by default will use all available processors")
    parser.add_argument("-v", "--verbose", help="increase output verbosity",
                        action="store_true")
    threads, verbose, filename = parseArgs(parser.parse_args())

    # read the entire file and store it in a variable:
    data = readfile(filename)

    # Init the data pool
    pool = ThreadPool(threads)  # Number of threads going to use
    try:
        pool.map(work, data)  # This launches the workers at the function to do work
    except KeyboardInterrupt:
        print("Exiting...")
    finally:
        pool.close()
        pool.join()
When you press Control-C, the program is probably at pool.join(), waiting for all threads to finish. The pool.map function just starts all the workers but does not block, so at the time the KeyboardInterrupt occurs it is not caught, because the program is not inside the try block.
I am not too sure about the best practices here, but I would try:
try:
    pool.map(work, data)  # This launches the workers at the function to do work
    pool.close()
    pool.join()
except KeyboardInterrupt:
    print("Exiting...")
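If that alone doesn't help, another commonly suggested workaround (a sketch, untested against this exact script) is map_async with a long timeout on get(): the main thread then sits in an interruptible wait, so Ctrl-C can reach the except block.

try:
    # map_async returns immediately; get() with a timeout keeps the main
    # thread in an interruptible wait while the workers run
    result = pool.map_async(work, data)
    result.get(timeout=10**6)
except KeyboardInterrupt:
    print("Exiting...")
finally:
    pool.close()
    pool.join()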
I have two independent processes on the same machine in need of IPC. As of now, I have this working solution:
server.py
#!/usr/bin/python3
from multiprocessing.managers import BaseManager
from multiprocessing import Process, Queue

def do_whatever():
    print('function do whatever, triggered by xyz')
    # do something

def start_queue_server(q):
    class QueueManager(BaseManager): pass
    QueueManager.register('get_queue', callable=lambda: q)
    m = QueueManager(address=('', 55555), authkey=b'tuktuktuk')
    s = m.get_server()
    s.serve_forever()

def main():
    queue = Queue()
    proc = Process(target=start_queue_server, args=(queue,))
    proc.start()
    while True:
        command = queue.get()
        print('command from queue:', command)
        if command == 'xyz':
            do_whatever()
        # many more if, elif, else statements

if __name__ == "__main__":
    main()
client.py
#!/usr/bin/python3
from multiprocessing.managers import BaseManager

def communicator(command):
    class QueueManager(BaseManager): pass
    QueueManager.register('get_queue')
    m = QueueManager(address=('', 55555), authkey=b'tuktuktuk')
    m.connect()
    queue = m.get_queue()
    queue.put(command)

def main():
    command = ('xyz')
    communicator(command)

if __name__ == "__main__":
    main()
Is there a more elegant way to call do_whatever than parsing the commands passed over the queue and then calling the target function?
Can I somehow pass a reference to do_whatever and call it directly from the client?
How is an answer from the server, e.g. True or False, communicated back to the client? I tried passing a shared variable instead of a queue object but failed. Do I need to open another connection using a second socket to pass the answer?
I read the Python documentation but couldn't find more options for unrelated processes. Inputs would be welcome!
Cheers singultus
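For the answer-back question, one option that stays within the existing manager (just a sketch; the names get_cmd_queue and get_reply_queue are illustrative, not part of the posted code) is to register a second, server-to-client queue on the same manager, so no extra socket is needed:

#!/usr/bin/python3
# sketch: same BaseManager, but with a second queue so the server can answer the client
from multiprocessing.managers import BaseManager
from multiprocessing import Queue

# --- server side ---
cmd_q, reply_q = Queue(), Queue()

class ServerQueueManager(BaseManager): pass
ServerQueueManager.register('get_cmd_queue', callable=lambda: cmd_q)
ServerQueueManager.register('get_reply_queue', callable=lambda: reply_q)
# start the manager's server in a child process with serve_forever(), as in server.py,
# then in the main loop:
#     command = cmd_q.get()
#     if command == 'xyz':
#         do_whatever()
#         reply_q.put(True)                  # push the answer back

# --- client side ---
class ClientQueueManager(BaseManager): pass
ClientQueueManager.register('get_cmd_queue')
ClientQueueManager.register('get_reply_queue')
# m = ClientQueueManager(address=('', 55555), authkey=b'tuktuktuk')
# m.connect()
# m.get_cmd_queue().put('xyz')
# answer = m.get_reply_queue().get()         # blocks until the server replies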
Finally, I settled for an additional Listener
server.py
#!/usr/bin/python3
from multiprocessing.managers import BaseManager
from multiprocessing import Process, Queue
from multiprocessing.connection import Client

def do_whatever():
    print('function do whatever, triggered by xyz')
    # do something

def start_queue_server(q):
    class QueueManager(BaseManager): pass
    QueueManager.register('get_queue', callable=lambda: q)
    m = QueueManager(address=('', 55555), authkey=b'tuktuktuk')
    s = m.get_server()
    s.serve_forever()

def talkback(msg, port):
    conn = Client(address=('', port), authkey=b'tuktuktuk')
    conn.send(msg)
    conn.close()

def main():
    queue = Queue()
    proc = Process(target=start_queue_server, args=(queue,))
    proc.start()
    while True:
        command = queue.get()
        print('command from queue:', command)
        if command[0] == 'xyz':
            do_whatever()
            talkback('aaa', command[1])
        # many more if, elif, else statements

if __name__ == "__main__":
    main()
client.py
#!/usr/bin/python3
from multiprocessing.managers import BaseManager
from multiprocessing.connection import Listener

def communicator(command, talkback=False):
    if talkback:
        # the authkey must match the one used by talkback() on the server
        listener = Listener(address=('', 0), authkey=b'tuktuktuk')
        return_port = listener.address[1]
        command = command + (return_port,)
    class QueueManager(BaseManager): pass
    QueueManager.register('get_queue')
    m = QueueManager(address=('', 55555), authkey=b'tuktuktuk')
    m.connect()
    queue = m.get_queue()
    queue.put(command)
    if talkback:
        conn = listener.accept()
        server_return = conn.recv()
        conn.close()
        listener.close()
        return server_return

def main():
    command = ('xyz',)  # note the trailing comma: the server indexes command[0] and command[1]
    communicator(command, True)

if __name__ == "__main__":
    main()
The client opens an available port and starts listening on it. It then sends the command to the server together with the aforementioned port number. The server executes the command, then uses the port number to report back to the client. After receiving the answer, the client closes the port.
I have yet another question about Python multiprocessing.
I have a module that creates a Process and just runs in a while True loop.
This module is meant to be enabled/disabled from another Python module.
That other module imports the first one once and is also run as a process.
How would I best implement this?
For reference:
# foo.py
from multiprocessing import Process

def foo():
    while True:
        if enabled:
            # do something
            pass

p = Process(target=foo)
p.start()
and imagine the second module to be something like this:

# bar.py
from multiprocessing import Process
import foo, time

def bar():
    while True:
        foo.enable()
        time.sleep(10)
        foo.disable()

Process(target=bar).start()
Constantly running a process that checks a condition inside a loop seems like a waste, but I would gladly accept a solution that just lets me set the enabled value from outside.
Ideally I would prefer to be able to terminate and restart the process, again from outside of this module.
From my understanding, I would use a Queue to pass commands to the Process. If it is indeed just that, can someone show me how to set it up in a way that lets me add something to the queue from a different module?
Can this even be easily done with Python, or is it time to abandon hope and switch to something like C or Java?
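For reference, here is a minimal sketch of the Queue idea described in the question (the module layout and names are only illustrative, not a vetted design): the command queue lives in foo, the worker process drains it, and any module that imports foo can push commands onto it.

# foo.py (sketch)
from multiprocessing import Process, Queue
import time

commands = Queue()

def _worker(q):
    enabled = False
    while True:
        if enabled:
            # poll for new commands without blocking, then do one unit of work
            if not q.empty():
                enabled = (q.get() == 'enable')
            print("working...")   # placeholder for the real work
            time.sleep(1)
        else:
            # while disabled, block until the next command arrives (no busy loop)
            enabled = (q.get() == 'enable')

def start():
    Process(target=_worker, args=(commands,)).start()

def enable():
    commands.put('enable')

def disable():
    commands.put('disable')

Any other module can then do import foo; foo.start(); foo.enable() without touching the worker process directly.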
I proposed in the comments two different approaches:
- using a shared variable from multiprocessing.Value
- pausing/resuming the process with signals

Control by sharing a variable
from multiprocessing import Process, Value
import time

def target_process_1(run_statement):
    while True:
        if run_statement.value:
            print("I'm running !")
        time.sleep(1)

def target_process_2(run_statement):
    time.sleep(3)
    print("Stopping")
    run_statement.value = False
    time.sleep(3)
    print("Resuming")
    run_statement.value = True

if __name__ == "__main__":
    run_statement = Value("i", 1)

    process_1 = Process(target=target_process_1, args=(run_statement,))
    process_2 = Process(target=target_process_2, args=(run_statement,))

    process_1.start()
    process_2.start()

    time.sleep(8)
    process_1.terminate()
    process_2.terminate()
Control by sending a signal
from multiprocessing import Process
import time
import os, signal

def target_process_1():
    while True:
        print("Running !")
        time.sleep(1)

def target_process_2(target_pid):
    time.sleep(3)
    os.kill(target_pid, signal.SIGSTOP)
    time.sleep(3)
    os.kill(target_pid, signal.SIGCONT)

if __name__ == "__main__":
    process_1 = Process(target=target_process_1)
    process_1.start()

    process_2 = Process(target=target_process_2, args=(process_1.pid,))
    process_2.start()

    time.sleep(8)
    process_1.terminate()
    process_2.terminate()
Side note: if possible, do not run a while True loop.
EDIT: if you want to manage your process from two different files, supposing you want to use control by sharing a variable, this is one way to do it:
# file foo.py
from multiprocessing import Value, Process
import time

__all__ = ['start', 'stop', 'enable', 'disable']

_statement = None
_process = None

def _target(run_statement):
    """ Target of foo's process """
    while True:
        if run_statement.value:
            print("I'm running !")
        time.sleep(1)

def start():
    global _process, _statement
    _statement = Value("i", 1)
    _process = Process(target=_target, args=(_statement,))
    _process.start()

def stop():
    global _process, _statement
    _process.terminate()
    _statement, _process = None, None

def enable():
    _statement.value = True

def disable():
    _statement.value = False
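A matching bar.py might then look like this (a sketch of the usage only; the timing values are arbitrary):

# file bar.py
import time
import foo

if __name__ == "__main__":
    foo.start()           # spawn foo's process
    while True:
        foo.enable()
        time.sleep(10)
        foo.disable()
        time.sleep(10)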
Problem: I expect the child to time out and be done, but instead it times out and begins to run again.
Can anyone tell me why this program runs forever? I expect it to run one time and exit...
Here is a working program. The master threads a function to spawn a child. It works great except that it ends up looping.
Here is the master:
# master.py
import multiprocessing, subprocess, sys, time

def f():
    p = subprocess.Popen(["C:\\Python32\\python.exe", "child.py"])
    # wait until child ends and check exit code
    while p.poll() == None:
        time.sleep(2)
    if p.poll() != 0:
        print("something went wrong with child.py")

# multithread a function process to launch and monitor a child
p1 = multiprocessing.Process(target = f())
p1.start()
and the child:
# child.py
import socket, sys

def main(args):
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.settimeout(10)
        sock.bind(('', 54324))
        data, addr = sock.recvfrom(1024)  # buffer size is 1024 bytes
        print(data)
        sock.close()
        return 0
    except KeyboardInterrupt as e:
        try:
            sock.close()
            return 0
        except:
            return 0

if __name__ == "__main__":
    sys.exit(main(sys.argv))
The problem is that your master.py doesn't have an if __name__ == '__main__' guard. On Windows, multiprocessing has to be able to reimport the main module in the child process, and if you don't use this if guard, you will re-execute the multiprocessing.Process in the child (resulting in an accidental forkbomb).
To fix, simply put all of the commands in master.py in the if guard:
if __name__ == '__main__':
    # multithread a function process to launch and monitor a child
    p1 = multiprocessing.Process(target = f())
    p1.start()
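One further detail worth checking, separate from the guard fix above: target=f() calls f immediately in the parent and hands its return value (None) to Process. To actually run f in the new process, pass the function object itself:

if __name__ == '__main__':
    # pass the function itself; target=f() would run f in the parent instead
    p1 = multiprocessing.Process(target=f)
    p1.start()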