Mootools Request getting "501 Unsupported method ('OPTIONS')" response - python

I have this mootools request:
new Request({
    url: 'http://localhost:8080/list',
    method: 'get',
}).send();
and a small python server that handles it with this:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import subprocess
class HttpHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        if self.path == '/list':
            self.list()
        else:
            self._404()

    def list(self):
        self.response200()
        res = "some string"
        self.wfile.write(res)

    def _404(self):
        self.response404()
        self.wfile.write("404\n")

    def response200(self):
        self.send_response(200)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Headers', 'X-Request, X-Requested-With')
        self.send_header('Content-type', 'application/json')
        self.end_headers()

    def response404(self):
        self.send_response(404)
        self.send_header('Content-type', 'application/json')
        self.end_headers()

def main():
    try:
        server = HTTPServer(('', 8080), HttpHandler)
        server.serve_forever()
    except KeyboardInterrupt:
        server.socket.close()

if __name__ == '__main__':
    main()
When I attempt to make this request, I get these errors:
OPTIONS http://localhost:8080/ 501 (Unsupported method ('OPTIONS'))
XMLHttpRequest cannot load http://localhost:8080/. Origin null is not allowed by Access-Control-Allow-Origin.
I'm not sure what's going on. Can someone help me out??

It's exactly what the response tells you: OPTIONS http://localhost:8080/ 501 (Unsupported method ('OPTIONS')).
When JavaScript attempts to request a resource from another origin, modern browsers first ask the target server whether it's OK to make that request from another origin; that is exactly what the Access-Control-* headers are for. This check does not happen as a normal GET, since that would effectively perform the request anyway. Instead the browser uses the OPTIONS method, which exists for the sole purpose of informing clients what they are allowed to do, without actually doing it.
So, you need a do_OPTIONS method, which might look something like:
def do_OPTIONS(self):
    if self.path in ('*', '/list'):
        self.send_response(200)
        self.send_header('Allow', 'GET, OPTIONS')
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Headers', 'X-Request, X-Requested-With')
    else:
        self.send_response(404)
    self.send_header('Content-Length', '0')
    self.end_headers()
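If you're on Python 3, the module is called http.server rather than BaseHTTPServer; a rough, minimal sketch of the same idea with the preflight handler added (the class name and response body here are illustrative, and the path check is omitted for brevity) might look like:

# A rough Python 3 sketch of the same CORS-enabled handler; names are illustrative.
from http.server import BaseHTTPRequestHandler, HTTPServer

class CORSHandler(BaseHTTPRequestHandler):
    def do_OPTIONS(self):
        # Answer the CORS preflight without doing any real work.
        self.send_response(200)
        self.send_header('Allow', 'GET, OPTIONS')
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Headers', 'X-Request, X-Requested-With')
        self.send_header('Content-Length', '0')
        self.end_headers()

    def do_GET(self):
        body = b'"some string"'  # JSON-encoded string, matching the Content-type below
        self.send_response(200)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Content-type', 'application/json')
        self.send_header('Content-Length', str(len(body)))
        self.end_headers()
        self.wfile.write(body)

if __name__ == '__main__':
    HTTPServer(('', 8080), CORSHandler).serve_forever()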

Related

Create HTTP get response with attached binary data file and json formatted metadata

I'm using BaseHTTPServer.BaseHTTPRequestHandler to implement my server.
Currently I respond to GET requests with merely a binary data file:
self.send_response(200)
self.send_header("Content-Type", 'application/octet-stream')
self.send_header("Content-Disposition", 'attachment; filename="{}"'.format(os.path.basename(FILEPATH)))
fs = os.fstat(f.fileno())
self.send_header("Content-Length", str(fs.st_size))
self.end_headers()
Now it's requested to add another section that includes some short JSON-formatted configuration data (e.g. {'status': 'ok', 'type': 'keepalive'}), and I'd rather pass this information in the same response, separated either by a dedicated HTTP header or within the HTTP body.
What is the best way to do so? I'd like to know how to extend my code to support this.
Thanks
There are lots of ways to do this; I think the best choice is going to depend on what your receiving side is capable of understanding most easily.
The most literal interpretation would be to use the content type multipart/mixed (https://www.w3.org/Protocols/rfc1341/7_2_Multipart.html), but you're probably going to have to write your own parsing on the receiving end. I don't know if this is exactly to spec, but it gets the idea across:
from http.server import BaseHTTPRequestHandler
import http.server
import socketserver
import string
import random
import io
PORT = 8000
class ResponsePart:
    def __init__(self, content, content_type):
        self.content = content.encode('utf-8')
        self.content_type = content_type.encode('utf-8')

class Mine(http.server.BaseHTTPRequestHandler):
    def get_separator(self, parts):
        while True:
            boundary = []
            for i in range(32):
                boundary.append(random.choice(string.digits + string.ascii_letters))
            boundary = ''.join(boundary).encode('ascii')
            for part in parts:
                if boundary in part:
                    break
            else:
                return boundary

    def do_GET(self):
        responses = [
            ResponsePart('abc123', 'Content-type: application/octet-stream'),
            ResponsePart('{"a":"b"}', 'Content-type: application/json'),
        ]
        boundary = self.get_separator([r.content for r in responses])
        self.send_response(200)
        self.send_header("Content-Type", 'multipart/mixed; boundary=' + boundary.decode('ascii'))
        self.end_headers()
        for piece in responses:
            self.wfile.write(b'--')
            self.wfile.write(boundary)
            self.wfile.write(b'\r\n')
            self.wfile.write(piece.content_type)
            self.wfile.write(b'\r\n')
            self.wfile.write(piece.content)
            self.wfile.write(b'\r\n')

Handler = Mine
with socketserver.TCPServer(("", PORT), Handler) as httpd:
    httpd.serve_forever()
With that out of the way, I'd probably do this using JSON (or something similar) so that you're returning a single, consistent content type:
from http.server import BaseHTTPRequestHandler
import http.server
import socketserver
import string
import random
import io
import json
PORT = 8000
class Mine(http.server.BaseHTTPRequestHandler):
    def do_GET(self):
        response = {
            'filedata': 'abc123',
            'status': {"a": "b"},
        }
        output_data = json.dumps(response).encode('utf-8')
        self.send_response(200)
        self.send_header("Content-Type", 'application/json')
        self.send_header("Content-Length", str(len(output_data)))
        self.end_headers()
        self.wfile.write(output_data)

Handler = Mine
with socketserver.TCPServer(("", PORT), Handler) as httpd:
    httpd.serve_forever()
This is going to be far easier to handle on the receiving end: one JSON decode and you're done.
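For instance, a rough client-side sketch for the single-JSON variant (assuming the server above is running on localhost:8000) is just one read plus one json.loads:

# A minimal client sketch for the single-JSON response above (assumes localhost:8000).
import json
from urllib.request import urlopen

with urlopen('http://localhost:8000/') as resp:
    payload = json.loads(resp.read().decode('utf-8'))

print(payload['filedata'])  # 'abc123'
print(payload['status'])    # {'a': 'b'}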

Is there some caching or forking happening in `HTTPServer` or `BaseHTTPRequestHandler`?

It could be my code is wrongly implemented, but I'm finding that while I can serve up GET requests from literal data, I cannot update that data and have it shown as updated in subsequent GET requests. I also cannot have POST requests update the data.
So it behaves as though somewhere in Python's HTTPServer or BaseHTTPRequestHandler there's caching or forking happening.
Thanks in advance for looking it over, but, gently, no, I do not want to use a non-core 3.8 module or rewrite this with a wholly different framework or Flask. I think this should work, but it's misbehaving in a way I can't spot the reason for. If I were using C or Go's built-in libraries, I'd expect it would not be as much of a head-scratcher (for me).
To demonstrate, you'd run the following python implementation, and load http://127.0.0.1:8081/ two or three times:
"""
A Quick test server on 8081.
"""
from http.server import HTTPServer, BaseHTTPRequestHandler
import cgi
import json
import os
import sys
ADDR = '127.0.0.1'
PORT = 8081
def run(server_class=HTTPServer, handler_class=BaseHTTPRequestHandler):
    server_address = (ADDR, PORT)
    with server_class(server_address, handler_class) as httpd:
        print("serving at", ADDR, "on", PORT, f"[ http://{ADDR}:{PORT} ]")
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            print(" stopping web server due to interrupt signal...")
            httpd.socket.close()

class SimpleHandler(BaseHTTPRequestHandler):
    """
    Implements responses to GET POST
    """

    def __init__(self, request, client_address, server):
        """Sets up the server's memory, a favicon, and one text pseudo-file."""
        self.files = {
            '/oh': ['text/plain', "It's me", ],
            '/favicon.ico': [
                'image/svg+xml',
                '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 48 48"><text y="1em" font-size="48">⁇</text></svg>',
            ],
        }
        self.head = '<link rel="icon" type="image/svg+xml" sizes="48x48" '\
                    'href="/favicon.ico">'
        super(SimpleHandler, self).__init__(request, client_address, server)

    def _set_headers(self, content_type='application/json', response=200):
        self.send_response(response)
        self.send_header("Content-type", content_type)
        self.end_headers()

    def _html(self, message, title='Simple Server', extra=""):
        """This generates HTML with `message` in the h1 of body."""
        content = f"<html><head><title>{title}</title>{self.head}</head>" \
                  f"<body><h1>{message}</h1>{extra}</body></html>"
        return content.encode("utf8")  # NOTE: must return a bytes object!

    def do_GET(self):
        """Respond to a GET request."""
        if self.path == "/":
            self._set_headers('text/html')
            fnames = [f'<li>{fn}</li>' for fn in self.files.keys()]
            fnames.sort()
            self.wfile.write(self._html(
                "Welcome",
                extra='Try:'
                      '<ul>'
                      '<li>/hello</li>'
                      f'{"".join(fnames)}'
                      '</ul>'
            ))
        elif self.path == "/hello":
            self._set_headers('text/html')
            self.wfile.write(self._html("hello you"))
        elif self.path in self.files:
            content_type, content = self.files[self.path]
            self.send_response(200)
            self._set_headers(content_type)
            self.wfile.write(content.encode())
        else:
            self.send_error(404)
        # Note this update doesn't seem to happen to the in memory dict.
        self.files[f"/{len(self.files)}"] = [
            "text/html", self._html(len(self.files))]

    def do_HEAD(self):
        if self.path in ["/", "/hello"]:
            self._set_headers('text/html')
        elif self.path in self.files:
            content_type, _ = self.files[self.path]
            self._set_headers(content_type)
        else:
            self.send_error(404)

    def do_POST(self):
        """Should update pseudo-files with posted file contents."""
        ctype, pdict = cgi.parse_header(
            self.headers.get('content-type', self.headers.get_content_type()))
        print("POSTED with content type", ctype)
        content = None
        if ctype == 'application/x-www-form-urlencoded':
            print(" * This multipart/form-data method might not work")
            content = {"content": str(self.rfile.read(int(self.headers['Content-Length'])).decode())}
        elif ctype == 'multipart/form-data':
            print(" * This multipart/form-data method might not work")
            fields = cgi.parse_multipart(self.rfile, pdict)
            content = {"content": fields.get('content')}
        elif ctype == 'application/json':
            data_string = self.rfile.read(int(self.headers['Content-Length']))
            content = json.loads(data_string)
        else:
            self.send_error(404)
        print(" * Received content:", content)
        # Note this update doesn't seem to happen to the in memory dict.
        self.files[self.path] = ['application/json', content]
        self._set_headers(response=201)
        self.wfile.write(json.dumps(content).encode())

if __name__ == '__main__':
    print('FYI:')
    print(' LANG =', os.getenv('LANG'))
    print(' Default Charset Encoding =', sys.getdefaultencoding())
    path_to_script = os.path.dirname(os.path.realpath(__file__))
    print('Serving from path:', path_to_script)
    os.chdir(path_to_script)
    run(handler_class=SimpleHandler)
Even before loading http://127.0.0.1:8081/ one could try posting to add something to the self.files dict, e.g.:
curl -v -H 'content-type: application/json' \
--data-binary '{"this": "should work"}' http://127.0.0.1:8081/new_file
And you can see the server respond, and also print the data received, which should now be in self.files, so / should then show it.
You can mix it up with:
curl -v --data-urlencode 'content={"this": "should work"}' http://127.0.0.1:8081/new_file2
But neither of these adds a self.files['/new_file'] or '/new_file2' entry, and it's just not clear why.
One should be able to request /new_file or /new_file2, but those instead return 404.
With the last lines in do_GET, multiple GET / requests should show more listed items.
$ curl http://127.0.0.1:8081
<html><head><title>Simple Server</title><link rel="icon" type="image/svg+xml" sizes="48x48" href="/favicon.ico"></head><body><h1>Welcome</h1>Try:<ul><li>/hello</li><li>/favicon.ico</li><li>/oh</li></ul></body></html>
$ curl http://127.0.0.1:8081
<html><head><title>Simple Server</title><link rel="icon" type="image/svg+xml" sizes="48x48" href="/favicon.ico"></head><body><h1>Welcome</h1>Try:<ul><li>/hello</li><li>/favicon.ico</li><li>/oh</li></ul></body></html>
Moving the lines that add a new key and value to self.files to the top of do_GET shows that it does update, but only once, which seems odder still:
$ curl http://127.0.0.1:8081
<html><head><title>Simple Server</title><link rel="icon" type="image/svg+xml" sizes="48x48" href="/favicon.ico"></head><body><h1>Welcome</h1>Try:<ul><li>/hello</li><li>/2</li><li>/favicon.ico</li><li>/oh</li></ul></body></html>
$ curl http://127.0.0.1:8081
<html><head><title>Simple Server</title><link rel="icon" type="image/svg+xml" sizes="48x48" href="/favicon.ico"></head><body><h1>Welcome</h1>Try:<ul><li>/hello</li><li>/2</li><li>/favicon.ico</li><li>/oh</li></ul></body></html>
Okay, it turns out that a new SimpleHandler is made for each request, so I had to move self.files out to an outer scope and also be careful about what is set up during SimpleHandler's __init__. With that change the behavior is basically what I had expected.
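In case it helps anyone hitting the same thing, a minimal sketch of that fix (trimmed down from the code above, so the layout is illustrative) is to hold the dict at class level so it survives across the per-request handler instances:

# A trimmed sketch of the fix: state lives on the class, not on the per-request instance.
import json
from http.server import HTTPServer, BaseHTTPRequestHandler

class SimpleHandler(BaseHTTPRequestHandler):
    # Shared by every request, because a fresh handler instance is built per connection.
    files = {'/oh': ['text/plain', "It's me"]}

    def do_GET(self):
        if self.path not in self.files:
            self.send_error(404)
            return
        content_type, content = self.files[self.path]
        body = content.encode() if isinstance(content, str) else json.dumps(content).encode()
        self.send_response(200)
        self.send_header("Content-type", content_type)
        self.end_headers()
        self.wfile.write(body)

    def do_POST(self):
        length = int(self.headers.get('Content-Length', 0))
        content = json.loads(self.rfile.read(length) or b'{}')
        SimpleHandler.files[self.path] = ['application/json', content]  # persists across requests
        self.send_response(201)
        self.send_header("Content-type", 'application/json')
        self.end_headers()
        self.wfile.write(json.dumps(content).encode())

if __name__ == '__main__':
    HTTPServer(('127.0.0.1', 8081), SimpleHandler).serve_forever()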

Quick Rest API with Python for mocking responses

I am testing a C# application that makes requests to another REST API, and I want to mock that server. I know basic Python and I was wondering whether I can write a simple REST API server without getting involved with large frameworks like Django. It would be a simple server where I receive JSON through the request body and have to return another JSON in response (with the logic for that response inside, like a view).
Best regards!
Something simple like this:
@path(/api/v1/somepath, GET)
def my_function(request):
    json_input = request.body.json()
    # My logic here
    response.status = 200
    response.body = {'some_field': 'something'}
    return response
In case you really don't want to use any external frameworks/libraries, you can create a simple class which extends BaseHTTPRequestHandler, something like this:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import json
class S(BaseHTTPRequestHandler):
    def _set_headers(self):
        self.send_response(200)
        self.send_header('Content-type', 'application/json')
        self.end_headers()

    def do_GET(self):
        self._set_headers()
        self.data_string = self.rfile.read(int(self.headers['Content-Length']))
        self.send_response(200)
        self.end_headers()

        data = json.loads(self.data_string)
        # your processing
        outJson = {"success": True}

        self.wfile.write(json.dumps(outJson))

    def do_HEAD(self):
        self._set_headers()
        self.wfile.write("HEAD")

    def do_POST(self):
        self._set_headers()
        self.wfile.write("POST")
and then start the server locally at the port of your choice (80 by default), simply like this:
def run(port=80):
    httpd = HTTPServer(('', port), S)
    print 'Starting httpd...'
    httpd.serve_forever()

if __name__ == "__main__":
    from sys import argv

    if len(argv) == 2:
        run(port=int(argv[1]))
    else:
        run()
And if you need to manage lots of routes easily, you can use microframeworks such as klein or Flask (or even bottle) to make things easier and simpler for you. A minimal usage example for klein would look something like this:
import json
from klein import Klein
class ItemStore(object):
    app = Klein()

    def __init__(self):
        self._items = {}

    @app.route('/')
    def items(self, request):
        request.setHeader('Content-Type', 'application/json')
        return json.dumps(self._items)

    @app.route('/<string:name>', methods=['PUT'])
    def save_item(self, request, name):
        request.setHeader('Content-Type', 'application/json')
        body = json.loads(request.content.read())
        self._items[name] = body
        return json.dumps({'success': True})

    @app.route('/<string:name>', methods=['GET'])
    def get_item(self, request, name):
        request.setHeader('Content-Type', 'application/json')
        return json.dumps(self._items.get(name))
and then run the server like this:
if __name__ == '__main__':
    store = ItemStore()
    store.app.run('localhost', 8080)
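Once it's running, you can exercise the store from Python as well; a rough client sketch (the item name and payload here are made up) might be:

# A rough client sketch against the klein store above (assumes it is running on localhost:8080).
import json
from urllib.request import Request, urlopen

req = Request('http://localhost:8080/widget',
              data=json.dumps({'price': 3}).encode('utf-8'),
              method='PUT')
print(urlopen(req).read())                             # b'{"success": true}'
print(urlopen('http://localhost:8080/widget').read())  # b'{"price": 3}'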
Also, if you want to use the mock APIs remotely and not just locally, you can use tunneling tools like ngrok. It's pretty simple and easy to use.

Python BaseHTTPRequestHandler: Respond with JSON

I have a Python class that inherits BaseHTTPRequestHandler and implements the method do_POST.
I currently only manage to respond with an integer status code, e.g. 200, using the following call at the end of the method:
self.send_response(200)
I am trying to also send some string as a part of the response. How should I do it?
At least in my environment (Python 3.7) I have to use
self.send_response(200)
self.send_header('Content-Type', 'application/json')
self.end_headers()
self.wfile.write(json_str.encode(encoding='utf_8'))
otherwise this error will be thrown:
TypeError: a bytes-like object is required, not 'str'
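Putting those pieces together, a minimal do_POST along these lines (assuming Python 3 and a JSON request body; the handler and field names are just illustrative) could look like:

# A minimal sketch: echo a JSON response from do_POST (Python 3, names illustrative).
import json
from http.server import BaseHTTPRequestHandler, HTTPServer

class JSONHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        length = int(self.headers.get('Content-Length', 0))
        payload = json.loads(self.rfile.read(length) or b'{}')
        body = json.dumps({'received': payload}).encode('utf-8')
        self.send_response(200)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Content-Length', str(len(body)))
        self.end_headers()
        self.wfile.write(body)

if __name__ == '__main__':
    HTTPServer(('', 8000), JSONHandler).serve_forever()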
It turns out to be pretty simple, though there aren't many examples for it.
Just use:
self.wfile.write(YOUR_STRING_HERE)
Specifically for the case of json:
import json
json_string = json.dumps(YOUR_DATA_STRUCTURE_TO_CONVERT_TO_JSON)
self.wfile.write(json_string)
It's an old question. Still, if someone else might be wondering the same, here are my two cents.
If you are doing anything useful, apart from playing around with Python, you should start looking at standard Python frameworks to handle HTTP server operations, like Django or Flask.
That being said, there's a small stub that I use to act as a test server for my outgoing requests, which should answer your question. You can set any status code, header or response body by modifying it:
#!/usr/bin/env python
# Reflects the requests with dummy responses from HTTP methods GET, POST, PUT, and DELETE
# Written by Tushar Dwivedi (2017)
import json
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from optparse import OptionParser
class RequestHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        request_path = self.path

        print("\n----- Request Start ----->\n")
        print("request_path :", request_path)
        print("self.headers :", self.headers)
        print("<----- Request End -----\n")

        self.send_response(200)
        self.send_header("Set-Cookie", "foo=bar")
        self.end_headers()
        self.wfile.write(json.dumps({'hello': 'world', 'received': 'ok'}))

    def do_POST(self):
        request_path = self.path

        # print("\n----- Request Start ----->\n")
        print("request_path : %s", request_path)

        request_headers = self.headers
        content_length = request_headers.getheaders('content-length')
        length = int(content_length[0]) if content_length else 0

        # print("length :", length)
        print("request_headers : %s" % request_headers)
        print("content : %s" % self.rfile.read(length))
        # print("<----- Request End -----\n")

        self.send_response(200)
        self.send_header("Set-Cookie", "foo=bar")
        self.end_headers()
        self.wfile.write(json.dumps({'hello': 'world', 'received': 'ok'}))

    do_PUT = do_POST
    do_DELETE = do_GET

def main():
    port = 8082
    print('Listening on localhost:%s' % port)
    server = HTTPServer(('', port), RequestHandler)
    server.serve_forever()

if __name__ == "__main__":
    parser = OptionParser()
    parser.usage = ("Creates an http-server that will echo out any GET or POST parameters, and respond with dummy data\n"
                    "Run:\n\n")
    (options, args) = parser.parse_args()

    main()
Again, even if you are just learning, and even if you only need to add five or six if/elses to the above to do what you are doing, it's better to do things right from the beginning, to avoid a lot of rework in the future. Use a framework capable of handling the boilerplate for you.

How can I debug POST requests with python's BaseHTTPServer / SimpleHTTPServer?

I found a script on this site for running a simple server via the command line with python.
I added some print lines in because I'd like to print out the GET and POST parameters via the command line for requests, but I can't seem to get them to show up anywhere.
If I just print out the s variable (pprint(vars(s))) I end up seeing this:
{'client_address': ('127.0.0.1', 53373),
'close_connection': 1,
'command': 'GET',
'connection': <socket._socketobject object at 0x10b6560c0>,
'headers': <mimetools.Message instance at 0x10b689ab8>,
'path': '/favicon.ico',
'raw_requestline': 'GET /favicon.ico HTTP/1.1\r\n',
'request': <socket._socketobject object at 0x10b6560c0>,
'request_version': 'HTTP/1.1',
'requestline': 'GET /favicon.ico HTTP/1.1',
'rfile': <socket._fileobject object at 0x10b6538d0>,
'server': <BaseHTTPServer.HTTPServer instance at 0x10b6893f8>,
'wfile': <socket._fileobject object at 0x10b6536d0>}
I then tried to use the print command with each of those entries, e.g. pprint(vars(s.connection)), but that's not working.
Here is the modified script:
#!/usr/bin/python
import time
import BaseHTTPServer
from pprint import pprint
HOST_NAME = 'localhost' # !!!REMEMBER TO CHANGE THIS!!!
PORT_NUMBER = 9000 # Maybe set this to 9000.
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):

    def do_HEAD(s):
        s.send_response(200)
        s.send_header("Content-type", "text/html")
        s.end_headers()

    def do_GET(s):
        """Respond to a GET request."""
        s.send_response(200)
        s.send_header("Content-type", "text/html")
        s.end_headers()
        s.wfile.write("<html><head><title>Title goes here.</title></head>")
        s.wfile.write("<body><form action='.' method='POST'><input name='x' value='1' /><input type='submit' /></form><p>This is a test.</p>")
        # If someone went to "http://something.somewhere.net/foo/bar/",
        # then s.path equals "/foo/bar/".
        s.wfile.write("<p>GET: You accessed path: %s</p>" % s.path)
        s.wfile.write("</body></html>")
        pprint(vars(s))

    def do_POST(s):
        """Respond to a POST request."""
        s.send_response(200)
        s.send_header("Content-type", "text/html")
        s.end_headers()
        s.wfile.write("<html><head><title>Title goes here.</title></head>")
        s.wfile.write("<body><p>This is a test.</p>")
        s.wfile.write("<body><form action='.' method='POST'><input type='text' name='xxxxxxxxxxxx' value='0000000000000000000000' /><input type='submit' /></form><p>This is a test.</p>")
        # If someone went to "http://something.somewhere.net/foo/bar/",
        # then s.path equals "/foo/bar/".
        s.wfile.write("<p>POST: You accessed path: %s</p>" % s.path)
        s.wfile.write("</body></html>")
        pprint(vars(s))
        pprint(vars(s.connection))
        pprint(vars(s.headers))
        pprint(vars(s.request))
        pprint(vars(s.rfile))
        pprint(vars(s.server))
        pprint(vars(s.wfile))
        pprint(vars(s.fp))
        """pprint (vars(s.request))"""

if __name__ == '__main__':
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class((HOST_NAME, PORT_NUMBER), MyHandler)
    print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
How can I print out POST and GET parameters using a simple script?
Desired output via the command line would look something like:
1.0.0.127. - - [03/Oct/2012 16:02:05] "POST / HTTP/1.1" 200 -
foo=1
bar=2
bis=3
It's not tremendously obvious, but the handler is using sockets behind the scenes. So you need to read the raw data from the socket, and then interpret it.
Use the urlparse module.
In Python 2, you want urlparse.parse_qs.
In Python 3, the library is renamed: you want urllib.parse.parse_qs.
Import urlparse, and then modify your do_POST method like so:
def do_POST(s):
    """Respond to a POST request."""
    # Extract and print the contents of the POST
    length = int(s.headers['Content-Length'])
    post_data = urlparse.parse_qs(s.rfile.read(length).decode('utf-8'))
    for key, value in post_data.iteritems():
        print "%s=%s" % (key, value)

    s.send_response(200)
    s.send_header("Content-type", "text/html")
    s.end_headers()
    ...
Set up a simple test client:
#!/usr/bin/env python
import urllib
import urllib2
url = 'http://localhost:9000'
post_dict = {'foo' : 1,
             'bar' : 2,
             'bis' : 3}
params = urllib.urlencode(post_dict)
post_req = urllib2.Request(url)
post_req.add_data(params)
response = urllib2.urlopen(post_req)
response_data = response.read()
response.close()
print response_data
Start the server, and then run the client:
ire#localhost$ python http_server.py
Wed Oct 3 21:38:51 2012 Server Starts - localhost:9000
foo=[u'1']
bar=[u'2']
bis=[u'3']
You can use the cgi module instead of urlparse; cgi implements POST parameter parsing out of the box. Using a well-tested library seems better.
import cgi
def do_POST(self):
    form = cgi.FieldStorage(
        fp=self.rfile,
        headers=self.headers,
        environ={"REQUEST_METHOD": "POST"}
    )
    for item in form.list:
        print "%s=%s" % (item.name, item.value)
