Threaded Python port scanner - python

I am having issues with a port scanner I'm editing to use threads.
This is the basics for the original code:
# Sequentially probe TCP ports 0-1999 on TargetIP, noting any that accept.
for port in range(0, 2000):
    probe = socket(AF_INET, SOCK_STREAM)
    # connect_ex returns 0 on success instead of raising an exception.
    if probe.connect_ex((TargetIP, port)) == 0:
        c = "Port %d: OPEN\n" % (port,)
    probe.close()
This takes approx 33 minutes to complete. So I thought I'd thread it to make it run a little faster. This is my first threading project so it's nothing too extreme, but I've run the following code for about an hour and get no exceptions yet no output. Am I just doing the threading wrong or what?
import threading
from socket import *
import time
a = 0   # ports attempted so far by ScanLow (incremented once per loop pass)
b = 0   # ports attempted so far by ScanHigh
c = ""  # most recent "Port N: OPEN" message produced by ScanLow
d = ""  # most recent "Port N: OPEN" message produced by ScanHigh
def ScanLow():
    """Scan TCP ports 0-999 on TargetIP.

    Appends a line to the global c for every open port and bumps the
    global progress counter a once per port attempted.
    """
    global a
    global c
    for i in range(0, 1000):
        s = socket(AF_INET, SOCK_STREAM)
        result = s.connect_ex((TargetIP, i))
        if result == 0:
            # Accumulate instead of overwriting: the original kept only
            # the LAST open port, so earlier hits were silently lost.
            c += "Port %d: OPEN\n" % (i,)
        s.close()
        a += 1
def ScanHigh():
    """Scan TCP ports 1000-1999 on TargetIP.

    Appends a line to the global d for every open port and bumps the
    global progress counter b once per port attempted.
    """
    global b
    global d
    # The original used range(1001, 2000): ScanLow stops at 999, so port
    # 1000 was never scanned by either thread. Start at 1000.
    for i in range(1000, 2000):
        s = socket(AF_INET, SOCK_STREAM)
        result = s.connect_ex((TargetIP, i))
        if result == 0:
            # Accumulate instead of overwriting, so every hit is kept.
            d += "Port %d: OPEN\n" % (i,)
        s.close()
        b += 1
Target = raw_input("Enter Host To Scan:")
TargetIP = gethostbyname(Target)
print "Start Scan On Host ", TargetIP
Start = time.time()
threading.Thread(target = ScanLow).start()
threading.Thread(target = ScanHigh).start()
e = a + b
while e < 2000:
f = raw_input()
End = time.time() - Start
print c
print d
print End
g = raw_input()

This is where your code is failing.
threading.Thread(target = ScanLow).start()
threading.Thread(target = ScanHigh).start()
e = a + b
while e < 2000:
f = raw_input()
Immediately after you start your threads, you assign a value to e once. However, you never update e after that, so the loop never exits.
It also seems like you are doing this to wait until both threads have finished. The join() method is a better way to do this.
from threading import Thread

threads = []
threads.append(Thread(target = ScanLow))
threads.append(Thread(target = ScanHigh))

for thread in threads:
    thread.start()
# both threads are running  (note: Python comments use '#', not '//')
for thread in threads:
    thread.join()
# both threads have stopped
Edit:
Not related to your question, but a helpful comment. Both of your scan functions are doing the exact same thing. You can replace them with one function that takes the scan range as arguments and start both threads with the one function.
from threading import Thread

def Scan(start, stop):
    """Scan TCP ports [start, stop) on TargetIP.

    Records the most recent open port in the global c and bumps the
    global progress counter a once per port attempted.
    """
    global a
    global c
    for i in range(start, stop):
        s = socket(AF_INET, SOCK_STREAM)
        result = s.connect_ex((TargetIP, i))
        if result == 0:
            c = "Port %d: OPEN\n" % (i,)
        s.close()
        a += 1

threadCount = 2
totalPorts = 2000
chunk = totalPorts / threadCount
threads = []
for start in xrange(0, totalPorts, chunk):
    # stop must be start + chunk. The original passed the chunk size
    # itself as stop, so every thread after the first got a range like
    # (1000, 1000) and scanned nothing.
    threads.append(Thread(target = Scan, args = (start, start + chunk)))
for thread in threads:
    thread.start()
# all threads are running
for thread in threads:
    thread.join()
# all threads have stopped
And now you can easily adjust the number of threads and ports to scan.

You have an awkward method for monitoring the threads. Using join will indicate when the thread is complete. No reason not to spin off more threads to get the results faster as well:
import threading
import socket
import time
# Shared list of ports found open; list.append is atomic under the GIL,
# so worker threads can append without extra locking.
ports = []

def check_port(ip, port):
    """Attempt a TCP connect to (ip, port); record the port in `ports` if it accepts."""
    sock = socket.socket()
    accepted = sock.connect_ex((ip, port)) == 0
    if accepted:
        ports.append(port)
    sock.close()
target = raw_input('Target? ')
s = time.time()
threads = []
for port in range(2000):
t = threading.Thread(target=check_port,args=(target,port))
t.start()
threads.append(t)
for t in threads:
t.join()
print ports
print time.time() - s
Output
[80, 135, 445, 1028]
6.92199993134

Related

How can I make 100 threads write numbers increasingly in a single file?

I know, I should use .join(). I am already using, but here is the thing: I make a round of threads (about 100) to perform some action, and after they complete, I start another 100 threads.
The context is that I am trying to check if x ports on my pc are open using threads. I start 100 threads and each check 100 different values and write their response into a txt file. The problem is that some of the ports are not being written to the file, while others are. When I run the code below, wanting to scan the ports from 3000 to 4000, I wanted my file to have 1000 lines, each specifying if the port is open or closed, but when I run it, it has, like, 930. Sometimes more, sometimes less, but never 1000 lines. Check below this code for another thing I tried.
def check_range_ports(ip_host, initial_port, final_port):
    """Scan ports from initial_port to final_port in batches of up to 100 threads."""
    count = initial_port
    loop = 0
    number_of_threads = 100
    while count < final_port:
        # Shrink the final batch so it does not run past final_port.
        if count + number_of_threads > final_port:
            number_of_threads = final_port - count + 1
        batch = []
        for port in range(count, count + number_of_threads):
            worker = threading.Thread(target=check_port, args=(ip_host, port))
            worker.daemon = True
            batch.append(worker)
        for worker in batch:
            worker.start()
        # Wait for the whole batch before launching the next one.
        for worker in batch:
            worker.join()
        count += number_of_threads
        loop += 1
# Serialises appends to ports.txt. Without it, many scanner threads write
# the file concurrently, which is exactly the lost/missing-lines corruption
# reported above (930-ish lines instead of 1000).
_ports_file_lock = threading.Lock()

def check_port(ip_host, port):
    """Probe one TCP port on ip_host and append an open/closed line to ./ports.txt.

    Exits the process on resolution failure, socket timeout, or Ctrl-C.
    """
    try:
        time.sleep(0.5)
        my_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        my_socket.settimeout(5)
        result = my_socket.connect_ex((ip_host, port))
        # Only one thread at a time may touch the shared output file.
        with _ports_file_lock:
            with open("./ports.txt", "a+", encoding="utf-8") as f:
                if result == 0:
                    f.write(f"Port {port} is open.\n")
                else:
                    f.write(f"Port {port} is closed.\n")
        my_socket.close()
    except socket.timeout:
        print("Timeout on socket!")
        sys.exit()
    except socket.gaierror:
        print("Error on the host!")
        sys.exit()
    except KeyboardInterrupt:
        print("Exiting program!")
        sys.exit()
Here is another thing I tried. I created 10 threads, and each of these threads created 100 subthreads more, and each of these subthreads would write a line in a file. It works better than the previous, but I can't get 1000 lines exactly, which is what I am aiming.
What I'm thinking of doing is doable? If yes, how can I achieve it?
def start_threads(ip_host, initial_port, final_port):
    """Spawn one coordinator thread per 100-port slice of the scan range.

    NOTE(review): all three parameters are immediately overwritten below,
    presumably leftover hard-coded test values -- confirm before reuse.
    """
    threads = []
    initial_port = 3000
    final_port = 4000
    span = final_port - initial_port
    ip_host = 'XXX.XXX.X.XX'
    # True division: a non-zero span always takes the first branch here.
    if span / 100 != 0:
        coordinator_count = int(span / 100) + 1
    else:
        coordinator_count = span / 100
    offset = 0
    for _ in range(coordinator_count):
        coordinator = threading.Thread(
            target=check_testing_port,
            args=(ip_host, initial_port + offset, final_port)
        )
        coordinator.start()
        threads.append(coordinator)
        offset += 100
    for i in range(coordinator_count):
        threads[i].join()
def check_testing_port(ip_host, port, final_port):
    """Spawn up to 100 sub-threads scanning ports [port, port + n).

    n is clamped so the batch never runs past final_port.
    """
    sub_threads = []
    number_of_sub_threads = 100
    print(port)
    if port + 100 > final_port:
        # Clamp the last batch. The original computed port - final_port,
        # which is NEGATIVE here, so range() produced nothing and the
        # tail of the scan range was silently skipped.
        number_of_sub_threads = final_port - port
    for i in range(port, port + number_of_sub_threads):
        t = threading.Thread(target=check_port, args=(ip_host, i))
        t.start()
        sub_threads.append(t)
    for i in range(number_of_sub_threads):
        sub_threads[i].join()
def check_port(ip_host, port):
    """Append a one-line record for `port` to ./testing_ports.txt (ip_host unused)."""
    record = f"Port {port}" + "\n"
    with open("./testing_ports.txt", "a", encoding="utf-8") as sink:
        sink.write(record)
In check_port you wrote
with open("ports.txt", "a+") as f:
f.write(...)
That is insane.
In the sense that, it is a critical section and you're not holding a lock.
Acquire a mutex before messing with the file.
Or write thread-specific files, which
subsequently are combined into a single file.
Better yet, tell all threads to write to
a single Queue, and have just one thread
read enqueued results and append them to a text file.
You need to properly synchronise the access to the shared buffer you are writing to (the output file) concurrently. Only one thread at a time must write to the output file, otherwise you'll get a data race leading to the data corruption you observed.
You can ensure that only one thread is writing to the shared file by using a mutex, a queue or any other suitable concurrency primitive. Here is an example using a queue:
import threading
import time
from queue import Queue
# Sentinel object signalling the writer thread that no more results will come.
__END = object()

def check_range_ports(ip_host: str):
    """Probe 100 ports concurrently, funnelling results through one writer thread."""
    worker_count = 100
    queue = Queue()
    # Compute workers: each probes one port and enqueues its result.
    workers: list[threading.Thread] = []
    for port in range(worker_count):
        worker = threading.Thread(target=check_port, args=(ip_host, port, queue))
        worker.start()
        workers.append(worker)
    # Exactly one writer owns the output file; everyone else only enqueues.
    writer_thread = threading.Thread(target=writer, args=("output.txt", queue))
    writer_thread.start()
    for worker in workers:
        worker.join()
    # All producers are done: tell the writer loop to drain and exit.
    queue.put(__END)
    writer_thread.join()
def check_port(ip_host: str, port: int, queue: Queue):
    """Simulate probing (ip_host, port), then publish the result on the queue."""
    time.sleep(0.5)
    # Hand the result to the single writer thread via the synchronisation queue.
    queue.put((ip_host, port))
def writer(filename: str, queue: Queue):
    """Drain `queue` into `filename` until the __END sentinel arrives.

    Being the only thread that touches the file keeps the writes ordered
    and race-free.
    """
    with open(filename, "w") as out:
        while True:
            item = queue.get()
            # The sentinel means every producer has finished.
            if item is __END:
                break
            ip_host, port = item
            out.write(f"Port {port} of host {ip_host} is open.\n")
def main():
    """Entry point: scan the first 100 ports of the loopback host."""
    check_range_ports("127.0.0.1")

if __name__ == "__main__":
    main()

How would I export my results to a .txt

I have the following code running and I would like to have the output of the script exported to a .txt file for later viewing. How could I go about doing this?
import socket, threading
def TCP_connect(ip, port_number, delay, output):
    """Record 'Listening' in output[port_number] if a TCP connect succeeds, else ''.

    delay is the socket timeout in seconds; output is a dict shared with
    the caller (one writer per port, so no locking is needed).
    """
    TCPsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    TCPsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    TCPsock.settimeout(delay)
    try:
        TCPsock.connect((ip, port_number))
        output[port_number] = 'Listening'
    except OSError:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit. socket.timeout and all socket
        # errors are OSError subclasses.
        output[port_number] = ''
    finally:
        # The original never closed the socket -- with 10000 calls that
        # leaks file descriptors until the process hits its limit.
        TCPsock.close()
def scan_ports(host_ip, delay):
    """Probe ports 0-9999 concurrently and print the listening ones in order.

    delay is the per-socket timeout in seconds.
    """
    output = {}   # port number -> 'Listening' or ''
    # One worker thread per port.
    workers = [
        threading.Thread(target=TCP_connect, args=(host_ip, port, delay, output))
        for port in range(10000)
    ]
    for worker in workers:
        worker.start()
    # Block until every probe has reported into `output`.
    for worker in workers:
        worker.join()
    # Report listening ports from small to large.
    for port in range(10000):
        if output[port] == 'Listening':
            print(str(port) + ': ' + output[port])
def main():
    """Prompt for a target and timeout, then run the 10000-port scan."""
    target = input("Enter host IP: ")
    timeout = int(input("How many seconds the socket is going to wait until timeout: "))
    scan_ports(target, timeout)

if __name__ == "__main__":
    main()
    print ("Thank you for scanning")
Any help would be greatly appreciated.
with open("your_desired_txt_filename", "a") as myFile:
Then, at the part where you want to write to the file:
myFile.write(your_content) ## Indent this line with the suitable level of indentation (it will be at least one indentation level deeper than the `with` clause

Python saving execution time when multithreading

I am having a problem when multithreading and using queues in python 2.7. I want the code with threads to take about half as long as the one without, but I think I'm doing something wrong. I am using a simple looping technique for the fibonacci sequence to best show the problem.
Here is the code without threads and queues. It printed 19.9190001488 seconds as its execution time.
import time
start_time = time.time()
def fibonacci(priority, num):
    """Iteratively compute the num-th Fibonacci number; `priority` is unused."""
    if num == 1 or num == 2:
        return 1
    prev, before = 1, 1
    for _ in range(num - 2):
        # Tuple assignment replaces the original three-statement shuffle.
        latest = prev + before
        prev, before = latest, prev
    return latest
# Run the same four computations the threaded variant below performs
# concurrently, then report total wall-clock time. (Python 2 print syntax.)
print fibonacci(0, 200000)
print fibonacci(1, 100)
print fibonacci(2, 200000)
print fibonacci(3, 2)
print("%s seconds" % (time.time() - start_time))
Here is the code with threads and queues. It printed 21.7269999981 seconds as its execution time.
import time
start_time = time.time()
from Queue import *
from threading import *
numbers = [200000,100,200000,2]
q = PriorityQueue()   # results come back ordered by each worker's priority
threads = []

def fibonacci(priority, num):
    """Compute the num-th Fibonacci number and enqueue (priority, value)."""
    if num == 1 or num == 2:
        q.put((priority, 1))
        return
    prev, before = 1, 1
    for _ in range(num - 2):
        latest = prev + before
        prev, before = latest, prev
    q.put((priority, latest))
# Build one worker thread per (priority, num) pair before starting any.
for i in range(4):
    priority = i
    num = numbers[i]
    t = Thread(target = fibonacci, args = (priority, num))
    threads.append(t)
#print threads
for t in threads:
    t.start()
for t in threads:
    t.join()
# All workers have finished, so the queue holds every result;
# PriorityQueue returns them smallest-priority first, i.e. launch order.
while not q.empty():
    ans = q.get()
    q.task_done()
    print ans[1]
print("%s seconds" % (time.time() - start_time))
What I thought would happen is the multithreaded code takes half as long as the code without threads. Essentially I thought that all the threads work at the same time, so the 2 threads calculating the fibonacci number at 200,000 would finish at the same time, so execution is about twice as fast as the code without threads. Apparently that's not what happened. Am I doing something wrong? I just want to execute all threads at the same time, print in the order that they started and the thread that takes the longest time is pretty much the execution time.
EDIT:
I updated my code to use processes, but now the results aren't being printed. Only an execution time of 0.163000106812 seconds is showing. Here is the new code:
import time
start_time = time.time()
from Queue import *
from multiprocessing import *
numbers = [200000,100,200000,2]
# NOTE(review): queue.PriorityQueue is thread-safe but NOT shared across
# multiprocessing.Process children -- each child works on its own copy,
# which matches the "results aren't being printed" symptom described in
# the EDIT above. A multiprocessing queue would be needed for that.
q = PriorityQueue()
processes = []

def fibonacci(priority, num):
    """Compute the num-th Fibonacci number and enqueue (priority, value)."""
    if num == 1 or num == 2:
        q.put((priority, 1))
        return
    prev, before = 1, 1
    for _ in range(num - 2):
        latest = prev + before
        prev, before = latest, prev
    q.put((priority, latest))
# One Process per (priority, num) pair.
for i in range(4):
    priority = i
    num = numbers[i]
    p = Process(target = fibonacci, args = (priority, num))
    processes.append(p)
#print processes
for p in processes:
    p.start()
for p in processes:
    p.join()
# NOTE(review): q here is a queue.PriorityQueue, which is not shared with
# the child processes -- presumably why the parent sees an empty queue and
# prints nothing (see the EDIT above). Verify with a multiprocessing queue.
while not q.empty():
    ans = q.get()
    q.task_done()
    print ans[1]
print("%s seconds" % (time.time() - start_time))
You've run in one of the basic limiting factors of the CPython implementation, the Global Interpreter Lock or GIL. Effectively this serializes your program, your threads will take turns executing. One thread will own the GIL, while the other threads will wait for the GIL to come free.
One solution would to be use separate processes. Each process would have its own GIL so would execute in parallel. Probably the easiest way to do this is to use Python's multiprocessing module as replacement for the threading module.

How to run a thread more than once in python

I am trying to run a thread more than once and keep getting an error:
RuntimeError: threads can only be started once
I have tried reading up multithreading and implementing it in my code without any luck.
Here is the function I am threading:
def receive(q):
    """Wait for one UDP datagram on port 13000 and push its payload onto q."""
    host = ""
    port = 13000
    buf = 1024
    addr = (host, port)
    listener = socket(AF_INET, SOCK_DGRAM)
    listener.bind(addr)
    # Blocks until a single datagram arrives; the function then returns.
    data, sender = listener.recvfrom(buf)
    q.put(data)
Here is the code I want to run:
q = Queue.Queue()
r = threading.Thread(target=receive, args=(q,))
while True:
r.start()
if q.get() == "stop":
print "Stopped"
break
print "Running program"
When the stop message gets sent, the program should break out of the while loop, but it does not run due to multithreading. The while loop should constantly print out Running program, until the stop message is sent.
The queue is used to receive the variable data from the receive function (which is the stop).
Here is a working example (for python 2.7).
The program has two modes of operation:
with no arguments it runs the receive loop
with arguments it sends a datagram
Note how r.start() and r.terminate() are called outside of the while loop in client.
Also, receive has a while True loop.
import sys
import socket
from multiprocessing import Process, Queue
# Address shared by the sender and the receiver's bind().
UDP_ADDR = ("", 13000)

def send(m):
    """Fire-and-forget datagram `m` to the receiver's UDP port."""
    out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    out.sendto(m, UDP_ADDR)
def receive(q):
    """Bind the shared UDP port and forward every datagram payload into q forever."""
    buf = 1024
    inbound = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    inbound.bind(UDP_ADDR)
    while True:
        payload, _sender = inbound.recvfrom(buf)
        q.put(payload)
def client():
q = Queue()
r = Process(target = receive, args=(q,))
r.start()
print "client loop started"
while True:
m = q.get()
print "got:", m
if m == "stop":
break
print "loop ended"
r.terminate()
if __name__ == '__main__':
    args = sys.argv
    # A CLI argument means "act as the sender"; none means "run the client".
    if len(args) > 1:
        send(args[1])
    else:
        client()
I think the problem is once the thread is started, calling thread.start() again throws the error.
Using a try block might work as a simple fix:
while True:
try:
r.start()
except Exception:
#or except RunTimeError:
pass
if q.get() == "stop":
print "Stopped"
break
print "Running program"

Executing different functions in a single for loop in python

For Instance I have three functions and a single for loop in python and I want to execute all these functions sequentially for example on first iteration function 1 should be executed and on 2nd iteration function 2 and so on
The three functions are:
from scapy.all import *
from random import randint
import threading
import time
from datetime import datetime
import multiprocessing
from itertools import count
#pktList = []
#pktsInt = 0
#def Packets():
# Generate packet
#for run_no in range(0,1)
p = raw_input('Enter PACKETs to send: ')
pktsInt = int(p)
pkts = IP(src="10.0.0.1",dst="10.0.0.2")/TCP()/"GET /HTTP/1.0\r\n\r\n"/Raw(RandString(size=120))
#print pkts
pkts[TCP].flags = "UFP"
pktList = []
for pktNum in range(0,pktsInt):
pktList.extend(pkts)
pktList[pktNum][TCP].dport = 80
#randint(1,65535) # Pkt has Ran PortNo.
print pktList[pktNum].summary()
#print len(pktList[pktNum])
#wrpcap('tcp-packets.pcap',pktList[pktNum])
#queue.put((run_no, pktsInt, pktsList))
# Send the list of packets send(pktList)
def send_http(queue):
for run_number in range(0,1): # this will run indefinitely, same as while True, must be killed to stop.
start=datetime.now()
print "\nStart Time: ", start
start_time=time.time()
send(pktList)
end = datetime.now()
print "\nEnd Time: ", end
totalTime = time.time()-start_time
totalBytes=(pktsInt*120)/totalTime
#print totalBytes,"Seconds"
queue.put((run_number, totalTime, totalBytes))
# Generate packet
pkts1 = IP(dst="10.0.0.2")/fuzz(UDP()/NTP(version=4))/Raw(RandString(size=120))
#print pkts
pkts1[UDP].flags = "UFP"
pktList1 = []
for pktNum1 in range(0,10):
pktList1.extend(pkts1)
pktList1[pktNum1][UDP].dport = randint(1,65535) # Pkt has Ran PortNo.
print pktList1[pktNum1].summary()
#print len(pktList1[pktNum1])
#wrpcap('udp-packets.pcap',pktList1[pktNum1])
# Send the list of packets send(pktList)
def send_ntp(queue):
for run_number in range(1,2): # this will run indefinitely, same as while True, must be killed to stop.
start1 = datetime.now()
print "\nStart Time: ", start1
start_time1=time.time()
send(pktList1)
end1 = datetime.now()
print "\nEnd Time: ", end1
totalTime = time.time()-start_time1
totalBytes=(10*120)/totalTime
#print totalBytes,"Seconds"
queue.put((run_number, totalTime, totalBytes))
# Generate packet
pkts2 = IP(src="10.0.0.1",dst="10.0.0.2")/TCP()/Raw(RandString(size=120))
#print pkts
pkts2[TCP].flags = "UFP"
pktList2 = []
for pktNum2 in range(0,5):
pktList2.extend(pkts2)
pktList2[pktNum2][TCP].dport = 25 # Pkt has Ran PortNo.
print pktList2[pktNum2].summary()
#print len(pktList2[pktNum2])
#wrpcap('tcp-packets.pcap',pktList[pktNum])
def send_smtp(queue):
# Send the list of packets send(pktList)
for run_number in range(2,3): # this will run indefinitely, same as while True, must be killed to stop.
start2 = datetime.now()
print "\n Start Time: ", start2
start_time2=time.time()
send(pktList2)
totalTime = time.time()-start_time2
end2 = datetime.now()
print "\nEnd Time: ", end2
totalBytes=(5*120)/totalTime
#print totalBytes,"Seconds"
queue.put((run_number, totalTime, totalBytes))
#print pktList[0].summary()
#start_time=time.time()
#send(pktList2)
#print pktList2[0].show()
#print pktList2[0].show2()
# All three sender processes report results through one shared queue.
q = multiprocessing.Queue()
#t1 = multiprocessing.Process(target=Packets)
t = multiprocessing.Process(target=send_http, args=(q, ))
p = multiprocessing.Process(target=send_ntp, args=(q, ))
r = multiprocessing.Process(target=send_smtp, args=(q, ))
#t1.start()
# Staggered starts: http at t=0, ntp at +12s, smtp at +28s.
t.start()
time.sleep(12) # "Some interval of time"
p.start()
time.sleep(16)
r.start()
time.sleep(29)
# Force-kill any sender still running after the final interval.
if t.is_alive():
    t.terminate()
if p.is_alive():
    p.terminate()
if r.is_alive():
    r.terminate()
rates = []
while True: # This loop will pull all items out of the queue and display them.
run = q.get()
if not run: # When we reach the end of the queue, exit
break
run_number, total_time, total_bytes = run
print "Run {run_number} took a total of {total_time}\
at an average rate of {total_bytes:.1f} B/s".format(run_number=run_number,
total_time=total_time,
total_bytes=total_bytes)
rates.append(total_bytes)
print "Average rate of {0:.1f} B/s".format(sum(rates)/float(len(rates)))
and a for-loop
# Make a function iterable, by repeatedly calling it.
def make_iterable(func, *args):
try:
while 1:
yield func(*args)
except:
pass
uni_rand = make_iterable(random.uniform, 0, 1)
# A generator for inter-arrival times.
# Inverse-transform sampling: -ln(U)/a maps uniform U onto an exponential
# variate with rate a (a and count must be defined elsewhere in the script).
inter_arrival = ( -(1./a)*math.log(u) for u in uni_rand)
# Generate inter-arrival times, then sleep for that long.
inter_arrival_iter = iter(inter_arrival)
for i in xrange(count):
    # Scale hours to seconds; .next() is the Python 2 iterator protocol.
    inter_arrival_seconds = inter_arrival_iter.next() * 3600.
    print "Sleeping for %f seconds." % inter_arrival_seconds
    time.sleep(inter_arrival_seconds)
#func1()
#Sequential Function Calling Here except for the executed one
Now The Issue is I am using multiprocessing with all the above mentioned functions, How would I call them now to generate different arrival time
Just put the functions in an iterable and call them one at a time in a for loop:
# Call each of the four functions exactly once, in the listed order.
for afunc in (func1, func2, func3, func4):
    afunc()
You could put the functions in a list:
# Cycle through the four functions, one per iteration.
functions = [func1, func2, func3, func4]
for i in range(20):
    function = functions[i%4]
    # The original selected the function but never invoked it (the
    # interactive transcript below shows the intended call).
    function()
EDIT
What will it do?
>>> def func1():
print('I am the first function!')
>>> def func2():
print('I am the second function!')
>>> def func3():
print('I am the third function!')
>>> def func4():
print('I am the fourth function!')
>>> functions = [func1, func2, func3, func4]
>>> for i in range(10):
function = functions[i%4]
function()
I am the first function!
I am the second function!
I am the third function!
I am the fourth function!
I am the first function!
I am the second function!
I am the third function!
I am the fourth function!
I am the first function!
I am the second function!
EDIT 2
Do you simply want the same thing except with 3 functions?
functions = [func1, func2, func3]
# Round-robin through the three functions, one call per iteration
# (count must be defined elsewhere; xrange is Python 2).
for i in xrange(count):
    functions[i%3]()
    #do the rest of your stuff here

Categories