Browser doesn't respond well to HTTPS proxy - Python

I've built my own HTTPS proxy, and whenever I send some data to a browser, the browser responds with nothing, and only after a long time.
Basically all the proxy should do is forward the message to the browser, get the response, and forward it back to the client.
The code of the proxy:
import socket
import select
import threading

serverSock = socket.socket()
serverSock.bind(('0.0.0.0', 8080))
serverSock.listen(3)

waiting_clients = {}   # client : browser
users_dict = {}
open_clients = {}
browsers_clients = {}  # browser : client

threading.Thread(target=browserCom).start()

while True:
    try:
        rlist, wlist, xlist = select.select(list(users_dict.keys()) + [serverSock], [], [], 0.3)
    except:
        pass
    else:
        for current_socket in rlist:
            if current_socket is serverSock:
                # new client
                client, address = serverSock.accept()
                print(f'{address} - connected to proxy')
                # add to dictionary
                users_dict[client] = address
                open_clients[address] = client
            else:
                # receive info
                receiving = True
                msg = bytearray()
                while receiving:
                    try:
                        data = current_socket.recv(1024)
                    except Exception as e:
                        print(e, 3)
                        if current_socket in users_dict.keys():
                            disconnect(users_dict[current_socket])
                        else:
                            current_socket.close()
                        break
                    else:
                        msg.extend(data)
                        # got the full msg
                        if len(data) < 1024:
                            receiving = False
                if len(msg) == 0:
                    if current_socket in users_dict.keys():
                        disconnect(users_dict[current_socket])
                else:
                    print("GOT FROM CLIENT", msg)
                    if current_socket in waiting_clients.keys():
                        # sending the data from client to browser
                        waiting_clients[current_socket].send(msg)
                    else:
                        msg = msg.decode()
                        msgSplit = msg.split()
                        address = msgSplit[1]
                        if address.split(':')[1].isnumeric():
                            if msg.startswith('CONNECT'):
                                browserLink, browserPort = address.split(':')
                                browserPort = int(browserPort)
                                browserIP = socket.gethostbyname(browserLink)
                                address = (browserIP, browserPort)
                                # connect to the site
                                browserSocket = socket.socket()
                                print(address)
                                browserSocket.connect((browserIP, browserPort))
                                waiting_clients[current_socket] = browserSocket
                                browsers_clients[browserSocket] = current_socket
                                msg_ret = "HTTP/1.1 200 Connection established\r\n\r\n"
                                sendMsg(users_dict[current_socket], msg_ret)
The proxy is able to make the connection after the CONNECT and notify the client, but after I send data to the browser (the remote site), I read its reply with a function running in the background:
def browserCom():
    while True:
        try:
            rlist, wlist, xlist = select.select(list(browsers_clients.keys()), [], [], 0.3)
        except:
            pass
        else:
            for current_browser in rlist:
                # receive data from the browser
                receiving = True
                resp_msg = bytearray()
                while receiving:
                    try:
                        data = current_browser.recv(1024)
                    except Exception as e:
                        print(e)
                        del waiting_clients[browsers_clients[current_browser]]
                        current_browser.close()
                        browsers_clients[current_browser].close()
                        del browsers_clients[current_browser]
                    else:
                        resp_msg.extend(data)
                        # got the full msg
                        if len(data) < 1024:
                            receiving = False
                print("RESPONSE FROM BROWSER", resp_msg)
                # sending the msg to the client
                sendMsg(users_dict[browsers_clients[current_browser]], resp_msg)
I have to wait a long time for the response, and most of the responses come back empty (mostly bytearray(b'')). Even when I do get a response, the client still shows nothing, even though I send the response back to it:
# sending the msg to the client
sendMsg(users_dict[browsers_clients[current_browser]], resp_msg)
using this function:
def sendMsg(address, msg):
    """
    :param address: client address to send to
    :param msg: msg to send
    :return: sends the msg to the socket registered for that address
    """
    if address in open_clients.keys():
        sock = open_clients[address]
        if type(msg) == str:
            msg = msg.encode()
        try:
            sock.send(msg)
        except Exception as e:
            print(e, 4)
            disconnect(address)
I hope you are able to understand my code. If something is unclear, please ask me in the comments and I will try to help you understand as soon as possible.
This is the best I can do to keep the code minimal for this problem without removing crucial parts.

My mistake was that I didn't understand that while tunneling, both the browser and the client keep exchanging messages. Adding another select over all the browser sockets let me check the data from all of them, and now it works.
The code above is updated and works.
Basically what I added is:
def browserCom():
    while True:
        try:
            rlist, wlist, xlist = select.select(list(browsers_clients.keys()), [], [], 0.3)
        except:
            pass
        else:
            for current_browser in rlist:
                # receive data from the browser
                receiving = True
                resp_msg = bytearray()
                while receiving:
                    try:
                        data = current_browser.recv(1024)
                    except Exception as e:
                        print(e)
                        del waiting_clients[browsers_clients[current_browser]]
                        current_browser.close()
                        browsers_clients[current_browser].close()
                        del browsers_clients[current_browser]
                    else:
                        resp_msg.extend(data)
                        # got the full msg
                        if len(data) < 1024:
                            receiving = False
                # disconnecting browser
                if resp_msg == bytearray(b''):
                    del waiting_clients[browsers_clients[current_browser]]
                    current_browser.close()
                    browsers_clients[current_browser].close()
                    del browsers_clients[current_browser]
                print("RESPONSE FROM BROWSER", resp_msg)
                # sending the msg to the client
                if current_browser in browsers_clients and browsers_clients[current_browser] in users_dict:
                    sendMsg(users_dict[browsers_clients[current_browser]], resp_msg)
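For anyone hitting the same issue, the core idea is that a CONNECT tunnel is just a bidirectional byte relay: whatever arrives from one side must be forwarded to the other until either side closes. Below is a minimal, self-contained sketch of that relay loop, independent of the code above; the function and variable names are only illustrative:

import select
import socket

def relay(client_sock, server_sock):
    # shuttle raw bytes both ways until one side closes or the tunnel goes idle
    peer = {client_sock: server_sock, server_sock: client_sock}
    while True:
        rlist, _, _ = select.select(list(peer), [], [], 60)
        if not rlist:
            return  # idle timeout, give up on this tunnel
        for sock in rlist:
            data = sock.recv(4096)
            if not data:
                # one endpoint closed; tear down both sockets
                client_sock.close()
                server_sock.close()
                return
            peer[sock].sendall(data)  # forward to the other endpoint

Each accepted CONNECT could run its own relay() (for example on its own thread), instead of one global loop over all browser sockets.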

Related

Can't implement receiving full string using STX and ETX condition

I am developing a Python socket server. The client sends each message starting with STX (\x02) and ending with ETX (\x03). My code can receive messages successfully, but I can't accumulate the full string using the STX and ETX conditions. I need help resolving this issue. Below I am sharing my code for better understanding.
import socket
import time

# Start New RnD
# Global Variables
enq = chr(5)
ack = chr(6)
stx = chr(2)
etx = chr(3)

# Connect to the server with `telnet $HOSTNAME 5000`.
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setblocking(False)
server.bind(('0.0.0.0', 5000))
server.listen(1)
connections = []

while True:
    try:
        connection, address = server.accept()
        connection.setblocking(False)
        connections.append(connection)
    except BlockingIOError:
        pass
    # Incoming Data Processing
    for connection in connections:
        try:
            full_message = ''
            data = ""
            while True:
                try:
                    received = connection.recv(1)
                    if received == enq.encode('utf-8'):
                        print("Received <ENQ>, Sending <ACK>")
                        connection.sendall(ack.encode('utf-8'))
                    if not received:
                        raise RuntimeError("unexpected end-of-message", data)
                    data += received.decode('utf-8')
                    #print("Received: {!r}".format(data))
                    if "\x03" in received.decode("utf-8"):
                        break
                except BlockingIOError:
                    pass
            print("Full Received: {!r}".format(data))
            print("Data Received, Sending <ACK>")
            connection.sendall(ack.encode('utf-8'))
        except BlockingIOError:
            continue
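A common way to handle this kind of framing is to keep a per-connection buffer, append whatever recv() returns, and only hand a message on once a full STX...ETX pair is present. A minimal sketch of that idea (the buffers dict and feed() helper are illustrative, not part of the code above):

buffers = {}  # connection -> bytes received so far

def feed(connection, chunk):
    # accumulate bytes and return every complete STX...ETX frame
    buf = buffers.get(connection, b'') + chunk
    frames = []
    while True:
        start = buf.find(b'\x02')            # STX
        end = buf.find(b'\x03', start + 1)   # ETX after that STX
        if start == -1 or end == -1:
            break                            # no complete frame buffered yet
        frames.append(buf[start + 1:end])    # payload between STX and ETX
        buf = buf[end + 1:]                  # drop the consumed frame
    buffers[connection] = buf                # keep any partial frame for the next recv
    return frames

With this, recv() could read larger chunks (for example connection.recv(1024)) and every frame returned by feed() would be a complete message, regardless of how the bytes were split across packets.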

Redirect instead of HTTP 403

I'm trying to make a proxy with a blocklist. I initially returned 403 when a user goes to a blocked page. However, that doesn't work with HTTPS and the browser shows ERR_TUNNEL_CONNECTION_FAILED, as explained in Respond with 403 in an HTTPS proxy.
Thus, I want to redirect the user to an HTML page instead.
This is my code:
import socket
import threading
import signal
import sys
import fnmatch
import errno
import time
import pdb
import re
from time import gmtime, strftime, localtime
import logging
import config
import rule
import tag as tag_store
from Ignite import flame
core = flame()
p=re.compile('(http:\/\/)?([\w\.-]*)(\:(\d*))?(\/.*)?')
thread_logger = logging.getLogger('thread')
access_logger = logging.getLogger('access')
csv_logger = logging.getLogger('csv')
def proxy(browser_conn, client_addr):
    print("hi")

    def ishostAllowed(host):
        print("\n\nHost:")
        print(str(host))
        access_logger.info(str(host))
        if host.split('.')[-1].isdigit():
            thread_logger.warn("Invalid host:".format(host), extra=req)
            return core.check_allow(host)
        #pdb.set_trace()
        tags = tag_store.get(host)
        if not tags:
            thread_logger.warn("{0} isn't allowed: empty tags".format(host), extra=req)
            return core.check_allow(host)
        for tag in tag_store.get(host):
            if not rule.isTagAllowed(tag):
                thread_logger.warn("{0}:{1} isn't allowed".format(host, tag), extra=req)
                return core.check_allow(host)
        return core.check(host)

    def proxy_http(request):
        try:
            # create a socket to connect to the web server
            #pdb.set_trace()
            server_conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            server_conn.settimeout(config.connection_timeout)
            server_conn.connect((request['host'], request['port']))
            server_conn.sendall(request['raw_data'])  # send request to webserver
            while 1:
                data = server_conn.recv(config.max_request_len)  # receive data from web server
                if (len(data) > 0):
                    browser_conn.send(data)  # send to browser
                else:
                    break
        except socket.error as error_msg:
            thread_logger.error(str(error_msg) + ":" + str(request), extra=req)
        finally:
            if server_conn:
                server_conn.close()
            if browser_conn:
                browser_conn.close()
            return

    def response(status, message):
        reply = "HTTP/1.1 {0} {1}\r\n"
        reply += "Proxy-agent: Sinktrap\r\n"
        reply += "\r\n"
        reply = reply.format(status, message)
        #pdb.set_trace()
        browser_conn.sendall(reply.encode())

    def proxy_https(request):
        #pdb.set_trace()
        try:
            server_conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            # If successful, send 200 code response
            server_conn.connect((req['host'], req['port']))
            response(200, 'Connection established')
        except socket.error as err:
            # If the connection could not be established, exit
            # Should properly handle the exit with http error code here
            thread_logger.error("Cannot establish https connection:" + err, extra=req)
            if server_conn:
                server_conn.close()
            if browser_conn:
                browser_conn.close()
            return
        # Indiscriminately forward bytes
        browser_conn.setblocking(0)
        server_conn.setblocking(0)
        timeout = time.time() + 60  # 1 minute
        while timeout - time.time() > 0:
            request_done = False
            replied_done = False
            try:
                request = browser_conn.recv(config.max_request_len)  # receive data from browser
                if (len(request) > 0):
                    server_conn.sendall(request)  # send to web server
                else:
                    request_done = True
                #thread_logger.info("REQUEST len: " + str(len(request)), extra=req)
            except socket.error as e:
                if e.errno == errno.EWOULDBLOCK:
                    time.sleep(0.1)
                    pass
                else:
                    thread_logger.error("pipe error:" + str(e), extra=req)
                    break
            try:
                reply = server_conn.recv(config.max_request_len)  # receive data from web server
                if (len(reply) > 0):
                    browser_conn.sendall(reply)  # send to browser
                else:
                    replied_done = True
                #thread_logger.info("reply len: " + str(len(reply)), extra=req)
            except socket.error as e:
                if e.errno == errno.EWOULDBLOCK:
                    time.sleep(0.1)
                    pass
                else:
                    thread_logger.error("pipe error:" + str(e), extra=req)
                    break
            if request_done and replied_done:
                break
        server_conn.close()
        browser_conn.close()

    raw_data = browser_conn.recv(config.max_request_len)  # get the request from browser
    req = {'raw_data': raw_data,
           'tname': threading.currentThread().getName(),
           'client_ip': client_addr[0],
           'client_port': client_addr[1]
           }
    thread_logger.info("REQUEST: {0}".format(raw_data), extra=req)
    #pdb.set_trace()
    try:
        # request_line is the first one. https://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html
        msg_body_pos = len(raw_data)
        for i in range(4, len(raw_data)):
            if raw_data[i-4:i].decode() == '\r\n\r\n':
                msg_body_pos = i
                break
        lines = raw_data[:msg_body_pos-4].decode('utf-8').split('\r\n')
        if len(lines[0]) < 16:
            thread_logger.warn("INVALU REQUEST:{0}".format(raw_data), extra=req)
            return
        headers = {k: v for k, v in (x.split(':', 1) for x in lines[1:])}
        if 'Referer' in headers:
            req['Referer'] = headers['Referer']
        else:
            req['Referer'] = ''
        if 'User-Agent' in headers:
            req['User-Agent'] = headers['User-Agent']
        else:
            req['User-Agent'] = ''
        req['request_line'] = lines[0]
        req['method'], req['request_uri'], req['http_version'] = lines[0].split(' ')
        # check if the first line is a valid request. request_line might be empty
        if not req['method'] or not req['request_uri'] or not req['http_version']:
            thread_logger.warn("INVALU REQUEST:{0}".format(raw_data), extra=req)
            return
    except Exception as e:
        thread_logger.error("INVALU REQUEST:{0} {1}".format(e, raw_data), extra=req)
        logging.exception("INVALU REQUEST")
        return
    access_logger.info("", extra=req)
    #pdb.set_trace()
    m = p.match(req['request_uri'])
    req['host'] = m.group(2)
    req['port'] = int(m.group(4)) if m.group(4) else 80
    # Check if request is allowed or not
    if not ishostAllowed(req['host']):
        csv_logger.info("blocked", extra=req)
        thread_logger.warn("Block REQUEST:{0}".format(raw_data), extra=req)
        response(403, "The website has been blocked by Ignite's proxy.")
        #Breq = req
        #Breq['host'] = "azlancoding.github.io/Blocked"
        #proxy_https(Breq)
        #response(307,"https://azlancoding.github.io/BLOCKED")
        return
    csv_logger.info("allowed", extra=req)
    #pdb.set_trace()
    if req['method'] == 'CONNECT':
        proxy_https(req)
    else:
        proxy_http(req)
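One thing to note for the plain-HTTP case: a 3xx status only redirects the browser if the reply carries a Location header, which the response() helper above never adds. A minimal sketch of a redirect reply, written in the same style as response() (nested inside proxy() so it can see browser_conn; the target URL is taken from the commented-out line above and is only an example):

def redirect(location):
    reply = "HTTP/1.1 307 Temporary Redirect\r\n"
    reply += "Proxy-agent: Sinktrap\r\n"
    reply += "Location: {0}\r\n".format(location)  # the header that actually triggers the redirect
    reply += "Content-Length: 0\r\n"
    reply += "\r\n"
    browser_conn.sendall(reply.encode())

# e.g. instead of response(403, ...):
# redirect("https://azlancoding.github.io/BLOCKED")

For CONNECT requests this may still not help, since browsers tend to treat anything other than a 2xx answer to CONNECT as a tunnel failure.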
The original proxy is pcxy
See my github project here

UDP Tracker only gives me my ip as answer to announce request

I've recently been trying to create a torrent client in python, and have just got the UDP announce protocol to work.
The tracker accepts my connect request just fine but only returns my IP and port as the peer list when I announce to it...
I've looked at the same torrents in other torrent clients and they show multiple working peers, while my request only shows my own computer (I've tried this on many torrents; all return just my IP and port).
Here's the code for the sending function itself:
async def announce_udp(self, try_num = 1):
    self.sock.settimeout(15)
    answer = {}
    inner_while = False
    while try_num < 4:
        while try_num < 4:
            try:
                print("trying to send")
                sended = self.send(1, self.announce_payload())
                print("sending the following packet: {0}".format(sended))
                print(self.url)
                inner_while = True
                break
            except Exception:
                print("problem in sending")
                try_num += 1
        if not inner_while:
            break
        try:
            answer = self.interpret(15)
            break
        except Exception:
            print("problem in receiving")
            try_num += 1
    print("announce answer is: {0}".format(answer))
    return answer
Here's the code for the payload-building function:
def announce_payload(self, downloaded = 0, left = 0, uploaded = 0, event = 0, key = get_transaction_id()):
    payload = [self.torrent.get_torrent_info_hash_decoded(), get_peer_id().encode(), downloaded,
               self.torrent.get_torrent_size(), uploaded, event, 0, key, -1, 6988]
    p_tosend = None
    try:
        p_tosend = struct.pack('!20s20sqqqiIIiH', *payload)
    except Exception as e:
        print("there was an error: {0}".format(e))
    return p_tosend
Here's the code for the interpret + process functions:
def interpret(self, timeout=10):
    self.sock.settimeout(timeout)
    print("got to interpret")
    try:
        response = self.sock.recv(10240)
        print("answer recieved")
    except socket.timeout:
        print("no answer, try again")
        raise TrackerResponseException("no answer", 0)
    headers = response[:8]
    payload = response[8:]
    action, trans_id = struct.unpack('!ll', headers)
    try:
        trans = self.transactions[trans_id]
    except KeyError:
        raise TrackerResponseException("InvalidTransaction: id not found", trans_id)
    try:
        trans['response'] = self.process(action, payload, trans)
    except Exception as e:
        trans['response'] = None
        print("error occured: {0}".format(e))
    trans['completed'] = True
    del self.transactions[trans_id]
    #print(trans)
    return trans

def process_announce(self, payload, trans):
    response = {}
    info = payload[:struct.calcsize("!lll")]
    interval, leechers, seeders = struct.unpack("!lll", info)
    print(interval, leechers, seeders, "noamsssssss")
    peer_data = payload[struct.calcsize("!lll"):]
    peer_size = struct.calcsize("!lH")
    num_of_peers = int(len(peer_data) / peer_size)
    print("the number of peers is: {0} and the peer data is: {1}".format(num_of_peers, peer_data))
    print()
    peers = []
    for peer_offset in range(num_of_peers):
        off = peer_size * peer_offset
        peer = peer_data[off:off + peer_size]
        addr, port = struct.unpack("!lH", peer)
        peers.append({
            'addr': socket.inet_ntoa(struct.pack('!L', addr)),
            'port': port,
        })
    print(payload)
    return dict(interval=interval, leechers=leechers, seeders=seeders, peers=peers)
I'm sorry if any of this is irrelevant, but I want to give you all of the code in case it tells you something.
(get_peer_id() returns a random peer id per the tracker protocol specification, and the get_transaction_id() returns random.randint(0, 1 << 32 - 1))
EDIT:
Alright, I've found the problem and now I'm feeling pretty dumb...
It turns out that even with the UDP tracker, the info hash you send has to be the SHA1 digest.
Hopefully this can help someone if they are stuck in the same problem :)
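For reference, the info hash is the 20-byte SHA-1 digest of the bencoded info dictionary from the .torrent file, and that raw digest is what belongs in the '!20s...' announce payload above. A minimal sketch, assuming a bencode() helper matching whatever parser read the .torrent file:

import hashlib

def info_hash(torrent_dict):
    # SHA-1 digest (raw 20 bytes, not hexdigest) of the bencoded 'info' dictionary
    info_bencoded = bencode(torrent_dict[b'info'])  # bencode() is an assumed helper
    return hashlib.sha1(info_bencoded).digest()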

How to receive all data after TCP connection has been closed by the peer?

Running some production code I faced the following problem:
When sending HTTP requests to some server, the server immediately closes the connection after sending its response, which, for some reason, results in data loss.
Analyzing TCP dumps I can see that the conversation goes like this:
client request
server ack
server push
server fin, ack (sent after ~0.000020 secs after previous push)
As a result my code can't get the data sent by the server (I'm guessing that, because of the small delay after the push, the POLLHUP event might arrive before POLLIN?).
Trying to mimic the problem I've written the following code
(it mimics the client behaviour on my side):
client:
import time
import socket
from errno import EAGAIN
from select import poll, POLLIN, POLLPRI, POLLERR, POLLHUP, POLLNVAL

def main(buf=""):
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client.setblocking(False)
    client.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    client.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    polling_object = poll()
    polling_object.register(client, POLLPRI | POLLIN | POLLERR | POLLHUP)
    in_buf = ""
    sock_closed = False
    try:
        client.connect(("127.0.0.1", 8877))
    except socket.error, e:
        pass
    while True and not sock_closed:
        events = polling_object.poll(0)
        for _, e in events:
            if e & (POLLIN | POLLPRI):
                while True:
                    try:
                        data = client.recv(1024)
                        if data:
                            in_buf += data
                        elif data == "":
                            client.close()
                            sock_closed = True
                            break
                    except socket.error, e:
                        if e.args[0] == EAGAIN:
                            break
                        else:
                            raise
            elif e & (POLLERR | POLLHUP | POLLNVAL):
                client.close()
                sock_closed = True
        if buf and not sock_closed:
            try:
                b_sent = client.send(buf)
                if b_sent == len(buf):
                    buf = ""
                else:
                    buf = buf[b_sent:]
            except socket.error, e:
                if e.args[0] != EAGAIN:
                    client.close()
                    sock_closed = True
        time.sleep(0.5)
        if sock_closed:
            return in_buf

if __name__ == '__main__':
    import sys
    if len(sys.argv) > 1:
        buf = sys.argv[1]
    else:
        buf = 'hello'
    print main(buf)
server
import datetime
import time
import socket

def main():
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(("127.0.0.1", 8877))
    server.listen(0)
    client, _ = server.accept()
    t1 = time.time()
    data = ""
    while not data:
        data += client.recv(1024)
    print "recv data %s" % data
    client.sendall('{"ok": 1}')
    t2 = time.time()
    client.close()
    t3 = time.time()
    server.close()
    return t1, t2, t3

if __name__ == '__main__':
    c_r, d_s, c_c = main()
    print "Connection received at ", datetime.datetime.fromtimestamp(c_r)
    print "All Data sent after %.12f secs" % (d_s - c_r)
    print "Connection closed after %.12f secs" % (c_c - d_s)
Running this code doesn't help me reproduce the problem, because my client can still get the data from the socket buffer, which is kind of obvious just from following the code. The only difference is that in the TCP dump it goes like this:
client request
server ack
server push
client ack
server fin, ack
I'm wondering: is there a way to send the fin, ack right after the push without "letting" the client send its ack? Can it be done using Python?
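One detail about the client above that may matter here: POLLHUP can be reported together with POLLIN, and bytes that arrived before the peer's FIN stay readable in the socket buffer, so closing immediately on the hangup branch can throw them away. A hedged sketch of a drain-before-close version of that branch (same names as the client code):

elif e & (POLLERR | POLLHUP | POLLNVAL):
    # drain anything still buffered before giving up on the socket
    while True:
        try:
            data = client.recv(1024)
        except socket.error, err:
            break          # EAGAIN or a real error: nothing more to read now
        if not data:
            break          # EOF: buffer fully drained
        in_buf += data
    client.close()
    sock_closed = True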

My python proxy server keeps giving me "The connection was reset"

I have been writing a transparent proxy server in Python to log where requests are going. Most pages load, e.g. google.co.uk; however, pages such as google.com get stuck loading, and some pages, such as a local IP, get the "Connection reset" error in the browser.
Any help would be greatly appreciated.
#!/usr/bin/env python
import socket, optparse, thread

def proxy(url, port, connection, address, data):
    try:
        get = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        get.connect((url, port))
        get.send(data)
        while True:
            reply = get.recv(BUFFER)
            if len(reply) > 0:
                connection.send(reply)
                info = float(len(reply))
                info = float(info / 1024)
                info = "%.3s" % (str(info))
                info = "%s KB" % (info)
                print("[*] Request Complete: %s => %s <=" % (str(address[0]), str(info)))
            else:
                break
        get.close()
        connection.close()
    except Exception as e:
        get.close()
        connection.close()

def handle(connection, address, data):
    first = data.split("\n")[0]
    url = first.split(" ")[1]
    protocolPosition = url.find("://")
    if protocolPosition == -1:
        # No protocol so default
        temp = url
    else:
        temp = url[(protocolPosition + 3):]
    if ":" in temp:
        # Port other than 80 has been specified
        port = temp.split(":")[-1].strip("/")
        webserver = temp.split(":")[:-1]
        try:
            # Incase there is ':' in the URL
            webserver = "".join(webserver)
        except:
            pass
    else:
        port = 80
        webserver = temp.strip("/")
    print("[*] '%s' => '%s'" % (address[0], webserver))
    proxy(webserver, port, connection, address, data)

receive = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
    receive.bind(("0.0.0.0", PORT))
except socket.error as e:
    print("Failed to bind to 0.0.0.0:%d" % (PORT))
    print("Error: " + str(e))
    raise SystemExit
receive.listen(MAXCONNECTIONS)
print("Listening on 0.0.0.0:%d" % (PORT))

while True:
    try:
        connection, address = receive.accept()
        data = connection.recv(BUFFER)
        thread.start_new_thread(handle, (connection, address, data,))
    except KeyboardInterrupt:
        break

print("\nReleasing socket")
receive.close()
Edit: After some digging around and error handling I narrowed the error down to
[Errno -2] Name or service not known
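"[Errno -2] Name or service not known" is what name resolution (getaddrinfo) raises when the host string handed to connect() cannot be resolved, so it is worth logging exactly what handle() extracted as webserver and port. A small diagnostic sketch around the connect call in proxy() (logging only, not a fix; note that port is parsed as a string for non-default ports):

try:
    get.connect((url, int(port)))
except socket.gaierror as e:
    # shows exactly which host string failed to resolve
    print("[!] Could not resolve host %r (port %r): %s" % (url, port, e))
    connection.close()
    return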
