I have a Python server that runs on an SSL-wrapped socket. I am trying to send HTTPS GET and POST requests from an R Shiny client to the Python server, yet I get the following error:
Listening on http://127.0.0.1:6565
Warning: Error in curl::curl_fetch_memory: schannel: Failed to get certificate location for >c/cygwin64/home/user/Documents/server/cert.pem
My overarching goal is to type text into the Shiny text input and send it to the Python server as a POST request over a secure channel. The Python server then runs the interpreter() function on the string it received and returns the appropriate response to the R Shiny client.
This setup worked when not using SSL certificates; however, after wrapping the Python socket with SSL, I have been getting this error.
I hope some of you might be able to enlighten me on this matter.
This is my Python server:
import ssl
from http.server import HTTPServer, BaseHTTPRequestHandler
from io import BytesIO


def interpreter(val):
    if val == "Search":
        return "Searched"
    elif val == "Add":
        return "Added"
    else:
        return "Invalid statement"


class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/csv")
        self.end_headers()
        self.wfile.write(bytes(
            "<html><body><h1>HELLO world!</h1></body></html>", "utf-8"))

    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        body = self.rfile.read(content_length)
        print(body.decode())
        modified_body = interpreter(body.decode())
        print(modified_body)
        self.send_response(200)
        self.end_headers()
        response = BytesIO()
        response.write(b'Received: ')
        response.write(body)
        response.write(b' End of request')
        self.wfile.write(response.getvalue())


httpd = HTTPServer(('localhost', 8000), SimpleHTTPRequestHandler)
httpd.socket = ssl.wrap_socket(httpd.socket, keyfile="key.pem",
                               certfile="cert.pem", server_side=True)
print("Server Running Now...")
httpd.serve_forever()
httpd.server_close()
print("Server Stopped")
And my Shiny R application is as follows:
library(shiny)
library(httr)

ui <- fluidPage(
  titlePanel("Request Module"),
  fluidRow(
    column(3,
      textInput(inputId = "request", label = "Enter Request Term"),
      actionButton(inputId = "getRequest", label = "Get"),
      actionButton(inputId = "postRequest", label = "Post")
    )
  ),
  fluidRow(
    wellPanel(
      htmlOutput(outputId = "resultOutput")
    )
  )
)

server <- function(input, output) {
  url <- "http://127.0.0.1:8000"
  s_url <- "https://127.0.0.1:8000"
  httr::set_config(config(ssl_verifypeer = 0L))
  certfile = "c/cygwin64/home/user/Documents/server/cert.pem"
  keyfile = "c/cygwin64/home/user/Documents/server/key.pem"

  observeEvent(input$getRequest, {
    getResponse <- GET(url = s_url,
                       config(sslcert = certfile, sslkey = keyfile))
    output$resultOutput <- renderPrint({
      getResponse$content
    })
  })

  observeEvent(input$postRequest, {
    reqMessage <- input$request
    postResult <- POST(url = s_url,
                       body = reqMessage,
                       content_type("text/csv"),
                       encoding = "UTF-8",
                       config(sslcert = certfile, sslkey = keyfile))
    processedResponse <- content(postResult, as = "text", encoding = "UTF-8")
    output$resultOutput <- renderPrint({
      processedResponse
    })
  })
}

shinyApp(ui = ui, server = server)
I'm trying to write a simple 'HTTPS over HTTP tunnel' server in Python.
Everything else works fine, except that the connection between the client and the server persists and ends up blocking forever.
I'm pretty sure they carry out the SSL handshake, because they both send and receive a couple of times before it hangs.
Here's the server code:
import socket
import threading


class SocketWrapper:
    def __init__(self, sock=None):
        if sock is None:
            self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        else:
            self.socket = sock

    def connect(self, host, port):
        try:
            self.socket.connect((host, int(port)))
            return True
        except socket.error:
            return False

    def close(self):
        # close the socket connection
        self.socket.shutdown(socket.SHUT_RDWR)
        self.socket.close()

    def send(self, data):
        bytes_sent = 0
        msg_len = len(data)
        while bytes_sent < msg_len:
            sent = self.socket.send(data[bytes_sent:])
            bytes_sent += sent

    def receive(self):
        chunks = []
        while True:
            try:
                self.socket.settimeout(0.5)
                chunk = self.socket.recv(4096)
                chunks.append(chunk)
            except socket.error:
                self.socket.settimeout(0)
                break
        return b''.join(chunks)


class HttpTunnel:
    def __init__(self, host='localhost', port=3000):
        # create the server socket, bind and listen
        self.host, self.port = host, port
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind((self.host, self.port))
        self.socket.listen(3)
        print("listening on port", self.port)
        self.running = True

    def handleClientRequest(self, connection, address):
        print("Connected to", address)
        clientSocket = SocketWrapper(connection)
        meta = clientSocket.receive().decode().split('\r\n')[0]
        # getting method, uri, version from 'CONNECT host:port HTTP/1.1'
        method, uri, version = meta.split(' ')
        host, port = uri.split(':')
        serverSocket = SocketWrapper()
        # if the connection to the remote server is created successfully
        if serverSocket.connect(host, port):
            print("Connected to remote server")
            # send connection success message to the client
            clientSocket.send(b'HTTP/1.1 200 OK\r\n\r\n')
            while True:
                try:
                    clientResponse = clientSocket.receive()
                    serverSocket.send(clientResponse)
                    print("Sent client - server")
                    serverResponse = serverSocket.receive()
                    clientSocket.send(serverResponse)
                    print("Sent server - client")
                except socket.error:
                    break
        else:
            # send some kind of error. In this case 404
            serverSocket.send(b'HTTP/1.1 404 Not Found\r\n\r\n')
        # close the connection
        clientSocket.close()
        serverSocket.close()

    def mainloop(self):
        while self.running:
            # accept client connection
            connection, address = self.socket.accept()
            self.handleClientRequest(connection, address)


proxy = HttpTunnel()
proxy.mainloop()
The client code:
import urllib
import urllib.request as request

proxy = request.ProxyHandler({
    'https': 'https://127.0.0.1:3000'
})
opener = request.build_opener(proxy)
request.install_opener(opener)

try:
    resp = request.urlopen('https://google.com')
    print(resp.read())
except Exception as e:
    print(e)
The client does not get the response from the server and therefore prints nothing.
Here's the server output:
listening on port 3000
Connected to ('127.0.0.1', 54888)
Connected to remote server
Sent client - server
Sent server - client
Sent client - server
Sent server - client
Sent client - server
There are several problems here:
The main problem is that you don't handle the case when recv returns 0 bytes because the socket has been closed. Instead you run into an endless loop where no data gets read and no data gets sent. Some simple print statements that show how much data is actually read would have helped to track this problem down.
Apart from that, polling the file handles one after another using settimeout is a bad approach. Instead, check the file handles in parallel and then read from whichever one has data - see select.
And finally, you are assuming that socket.send will send all the data given. This is not the case; it might send less. Check the return value or just use socket.sendall.
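For illustration, a minimal sketch of what the relay step could look like with select, sendall, and an explicit check for recv returning b'' (the relay helper and its arguments are illustrative, not taken from the code above):
import select

def relay(client_sock, server_sock):
    # forward bytes in both directions until one side closes
    sockets = [client_sock, server_sock]
    while True:
        # block until at least one socket has data to read
        readable, _, _ = select.select(sockets, [], [])
        for sock in readable:
            data = sock.recv(4096)
            if not data:
                # recv returned b'': the peer closed the connection, stop relaying
                return
            # sendall keeps sending until everything has been written
            target = server_sock if sock is client_sock else client_sock
            target.sendall(data)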
I have seen code like this that shows how to use a proxy with the Python requests library.
import requests

proxies = {
    'http': 'http://localhost:7777',
    'https': 'http://localhost:7777',
}

requests.get('http://example.org', proxies=proxies)
requests.get('https://example.org', proxies=proxies)
But I am wondering how we can make a very simple proxy server in Python that would be able to respond to GET requests.
You can find many examples of how to do it - even in questions on Stack Overflow.
Some of them use the standard module socket (but that doesn't look simple).
Others use the standard module http, but they show code for Python 2, which used different module names.
Version for Python 3:
import http.server
import socketserver
import urllib.request


class MyProxy(http.server.SimpleHTTPRequestHandler):
    def do_GET(self):
        print(self.path)
        url = self.path
        self.send_response(200)
        self.end_headers()
        self.copyfile(urllib.request.urlopen(url), self.wfile)


# --- main ---

PORT = 7777

httpd = None
try:
    socketserver.TCPServer.allow_reuse_address = True  # solution for `OSError: [Errno 98] Address already in use`
    httpd = socketserver.TCPServer(('', PORT), MyProxy)
    print(f"Proxy at: http://localhost:{PORT}")
    httpd.serve_forever()
except KeyboardInterrupt:
    print("Pressed Ctrl+C")
finally:
    if httpd:
        httpd.shutdown()
        #httpd.socket.close()
Test using the page httpbin.org:
import requests

proxies = {
    'http': 'http://localhost:7777',
    'https': 'http://localhost:7777',
}

response = requests.get('http://httpbin.org/get', proxies=proxies)
print(response.text)

response = requests.get('http://httpbin.org/get?arg1=hello&arg2=world', proxies=proxies)
print(response.text)
But it works only for HTTP.
For HTTPS it would need to wrap the socket using the ssl module - see the rough sketch below.
And it works only with GET.
For POST, PUT, DELETE, etc. it would need do_POST, do_PUT, do_DELETE, etc. with different code.
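Here is that rough sketch: the listening socket could be wrapped with TLS using ssl.SSLContext, assuming a cert.pem/key.pem pair exists (placeholder file names). Note that a real HTTPS proxy would also have to handle CONNECT tunneling, which this does not do:
import ssl

# assuming `httpd` is the socketserver.TCPServer created above,
# wrap its socket before calling httpd.serve_forever()
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
context.load_cert_chain(certfile="cert.pem", keyfile="key.pem")  # placeholder file names
httpd.socket = context.wrap_socket(httpd.socket, server_side=True)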
EDIT:
def do_POST(self):
    url = self.path

    # - post data -
    content_length = int(self.headers.get('Content-Length', 0))  # <--- size of data
    if content_length:
        content = self.rfile.read(content_length)  # <--- data itself
    else:
        content = None

    req = urllib.request.Request(url, method="POST", data=content)
    output = urllib.request.urlopen(req)

    # ---

    self.send_response(200)
    self.end_headers()
    self.copyfile(output, self.wfile)
But if you need a local proxy only to test your code, then you could use:
Python module/program: mitmproxy (Man-In-The-Middle Proxy)
not Python, not free (but works 30 days for free), with a nice GUI: Charles Proxy
more complex: OWASP ZAP, Burp Suite (Community Edition)
Below is what I have tried.
import http.server
import socketserver
import requests

PORT = 8000

Handler = http.server.SimpleHTTPRequestHandler

with socketserver.TCPServer(("", PORT), Handler) as httpd:
    print("serving at port", PORT)
    httpd.serve_forever()


def api(data):
    r = requests.post('http://localhost:8000/api', json=data)
    return r.json()
I am getting the error below with the above code.
ConnectionRefusedError: [WinError 10061] No connection could be made because the target machine actively refused it
Postman should be able to send a POST request with a JSON body.
You didn't show the full error message, and I don't use Windows to test it, but SimpleHTTPRequestHandler doesn't have a do_POST function to receive POST requests, and this can cause the problem.
You will have to subclass SimpleHTTPRequestHandler to create your own class with do_POST.
And this function will need to:
get the header information
read the JSON string
convert the request data from a JSON string to a dictionary
convert the response data from a dictionary to a JSON string
send the headers
send the JSON string
So it will need a lot of work.
Minimal working server
import http.server
import socketserver
import json

PORT = 8000


class MyHandler(http.server.SimpleHTTPRequestHandler):
    def do_POST(self):
        # - request -
        content_length = int(self.headers['Content-Length'])
        #print('content_length:', content_length)
        if content_length:
            input_json = self.rfile.read(content_length)
            input_data = json.loads(input_json)
        else:
            input_data = None

        print(input_data)

        # - response -
        self.send_response(200)
        self.send_header('Content-type', 'text/json')
        self.end_headers()

        output_data = {'status': 'OK', 'result': 'HELLO WORLD!'}
        output_json = json.dumps(output_data)
        self.wfile.write(output_json.encode('utf-8'))


Handler = MyHandler

try:
    with socketserver.TCPServer(("", PORT), Handler) as httpd:
        print(f"Starting http://0.0.0.0:{PORT}")
        httpd.serve_forever()
except KeyboardInterrupt:
    print("Stopping by Ctrl+C")
    httpd.server_close()  # to resolve problem `OSError: [Errno 98] Address already in use`
And the testing code:
import requests

data = {'search': 'hello world?'}

r = requests.post('http://localhost:8000/api', json=data)
print('status:', r.status_code)
print('json:', r.json())
This example doesn't check whether you call /api or /api/function or /api/function/arguments, because that would need more code - a rough sketch of such a check follows below.
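For illustration, the handler could dispatch on self.path like this; the /api check and the 404 response are assumptions added here, not part of the original MyHandler:
def do_POST(self):  # inside MyHandler
    # only accept the /api endpoint (illustrative check)
    if self.path != '/api':
        self.send_response(404)
        self.end_headers()
        self.wfile.write(b'{"status": "ERROR", "result": "unknown endpoint"}')
        return
    # ... then read and answer the request exactly as in MyHandler above ...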
So a pure Python API without a framework can take a lot of work, and it can be a waste of time.
Here is the same code with Flask. It is much shorter, and it already checks that you send to /api.
from flask import Flask, request, jsonify

app = Flask(__name__)


@app.route('/api', methods=["GET", "POST"])
def api():
    input_data = request.json
    print(input_data)

    output_data = {'status': 'OK', 'result': 'HELLO WORLD!'}

    return jsonify(output_data)


if __name__ == '__main__':
    #app.debug = True
    app.run(host='0.0.0.0', port=8000)
BTW:
If you want to test POST data, then you can use the portal http://httpbin.org: send a POST request to http://httpbin.org/post and it will send back all the data and headers.
It can also be used for other requests and data.
This portal was created with Flask, and there is even a link to the source code, so you can install it on your own computer.
It seems httpbin is part of the Postman repo on GitHub.
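For example, a quick check along those lines (the payload is just an arbitrary example):
import requests

# httpbin echoes the request back, so the posted JSON shows up under the "json" key
response = requests.post('http://httpbin.org/post', json={'search': 'hello world?'})
print(response.json()['json'])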
I'm trying to create a simple HTTP server that will receive POST messages and provide a simple response. I'm using the standard HTTPServer with Python. The client connects using a Session(), which should use a persistent connection, but after each POST I see the message below in the debug output indicating that the connection is dropping.
INFO:urllib3.connectionpool:Resetting dropped connection:
DEBUG:urllib3.connectionpool:"GET / HTTP/1.1" 200 None
The client works properly when I try it with Apache, so I believe the issue is in my simple server configuration. How can I configure the simple HTTP server to work with persistent connections?
Simple Server Python Code:
from http.server import HTTPServer, BaseHTTPRequestHandler
from io import BytesIO
import time
import datetime
import logging


class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
    def _set_response(self):
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.send_header("Connection", "keep-alive")
        self.send_header("keep-alive", "timeout=5, max=30")
        self.end_headers()

    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'Hello, world!')

    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        body = self.rfile.read(content_length)
        curr_time = datetime.datetime.now()
        data = ('{"msgid":"0x0002", "timestamp": "' + str(curr_time) + '", "message":"Test http response from Raspberry Pi HTTP server"}').encode()
        self.send_response(200)
        self.end_headers()
        response = BytesIO()
        #response.write(b'This is POST request. ')
        #response.write(b'Received: ')
        response.write(data)
        self.wfile.write(response.getvalue())


print("Simple HTTP Server running...")
logging.basicConfig(level=logging.DEBUG)
httpd = HTTPServer(('', 8000), SimpleHTTPRequestHandler)
httpd.serve_forever()
Client Python code:
#!/usr/bin/env python
# Using same TCP connection for all HTTP requests
import os
import json
import time
import datetime
import logging
import requests
from requests.auth import HTTPBasicAuth

logging.basicConfig(level=logging.DEBUG)

start_time = time.time()


def get_data(limit):
    session = requests.Session()
    url = "http://localhost:8000"
    for i in range(10):
        curr_time = datetime.datetime.now()
        data = '{"msgid":"0x0001", "timestamp": "' + str(curr_time) + '", "message":"Test http message from Raspberry Pi"}'
        print("Sending Data: " + data)
        response = session.post(url.format(limit), data)
        #response_dict = json.loads(response.text)
        print("Received Data: " + response.text)


if __name__ == "__main__":
    limit = 1
    get_data(limit)
    print("--- %s seconds ---" % (time.time() - start_time))
You aren't actually setting the Connection header in your POST handler. In order for persistent connections to work, you'll also need to set the Content-Length header in the response so that the client knows how many bytes of the HTTP body to read before reusing the connection.
Try this POST handler, adapted from your code:
def do_POST(self):
    content_length = int(self.headers['Content-Length'])
    body = self.rfile.read(content_length)

    # Process the request here and generate the entire response
    response_data = b'{"stuff": 1234}'

    # Send the response
    self.send_response(200)
    self.send_header("Connection", "keep-alive")
    self.send_header("Content-Length", str(len(response_data)))
    self.end_headers()

    # Write _exactly_ the number of bytes specified by the
    # 'Content-Length' header
    self.wfile.write(response_data)
I wrote the following code to accept an HTTP POST, write out a temp file that includes the POST data, and then send that temp file to a printer using subprocess and the UNIX lp command.
import json
import subprocess
import traceback
from http.server import HTTPServer, BaseHTTPRequestHandler
from io import BytesIO


class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'Hello, world!')

    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        body = self.rfile.read(content_length)
        try:
            result = json.loads(body, encoding='utf-8')
            # Do other stuff with result
            # (printer_queue and temp are defined in the omitted code)
            p = subprocess.Popen(['/usr/bin/env', 'lp', '-d', printer_queue, temp.name],
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            self.send_response(200)
            self.end_headers()
            response = BytesIO()
            response.write(b'POST Received: ')
            response.write(body)
            self.wfile.write(response.getvalue())
        except Exception as err:
            tb = traceback.format_exc()
            print(tb)
            self.send_response(500)  # 500 Internal Server Error
            self.end_headers()
            response = BytesIO()
            response.write(b'ERROR: Blah')
            self.wfile.write(response.getvalue())


httpd = HTTPServer(('localhost', 8000), SimpleHTTPRequestHandler)
and everything was awesome. Then I read that HTTPServer shouldn't be used in production, and everything was no longer awesome.
So how can I write equivalent code that can be used as a production server? I have an Apache web server, but I'm not sure how to add the above Python code to it (preferably without changing the above code too much, since there is a lot of it).
I found a way to connect your code with an nginx server. First, add some code around your handler to create the server socket, and after that write an nginx conf file. It will work.
Step 1:
Add a main() function to your code:
import json
import subprocess
import traceback
from http.server import HTTPServer, BaseHTTPRequestHandler
from io import BytesIO


class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'Hello, world!')

    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        body = self.rfile.read(content_length)
        try:
            result = json.loads(body, encoding='utf-8')
            # Do other stuff with result
            # (printer_queue and temp are defined in the omitted code)
            p = subprocess.Popen(['/usr/bin/env', 'lp', '-d', printer_queue, temp.name],
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            self.send_response(200)
            self.end_headers()
            response = BytesIO()
            response.write(b'POST Received: ')
            response.write(body)
            self.wfile.write(response.getvalue())
        except Exception as err:
            tb = traceback.format_exc()
            print(tb)
            self.send_response(500)  # 500 Internal Server Error
            self.end_headers()
            response = BytesIO()
            response.write(b'ERROR: Blah')
            self.wfile.write(response.getvalue())


def main():
    try:
        server = HTTPServer(('localhost', 8000), SimpleHTTPRequestHandler)
        print('Starting BaseServer.')
        server.serve_forever()
    except KeyboardInterrupt:
        print('Interrupt received; closing server socket')
        server.socket.close()


if __name__ == '__main__':
    main()
Step 2:
The nginx.conf file should look like this:
server {
    root /data/www;

    location / {
        proxy_pass http://localhost:8000;
    }
}
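Assuming nginx ends up listening on its default port 80 on the same machine, one way to test the proxied handler might be (the JSON payload is only an example):
import requests

# goes through nginx on port 80, which proxies to the Python server on port 8000
response = requests.post('http://localhost/', json={'msg': 'test print job'})
print(response.status_code, response.text)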
If you face any issue, comment below.