I am trying to write a simple multithreaded HTTP server that answers each request after 5 seconds. This code does not work as expected: two simultaneous requests take 10 seconds to complete, and I don't understand why.
from socketserver import ThreadingMixIn
from http.server import SimpleHTTPRequestHandler, HTTPServer, BaseHTTPRequestHandler

class ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
    pass

import sys
import os
import time

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        time.sleep(5)
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        self.wfile.write(b"answer")
        return

server = ThreadingSimpleServer(('', 8000), Handler)
try:
    while 1:
        sys.stdout.flush()
        server.handle_request()
except KeyboardInterrupt:
    print("Finished")
You are calling handle_request(), which handles one request after the other. You have to use serve_forever() instead, so that the server dispatches requests automatically.
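A minimal sketch of the fix, reusing the Handler and ThreadingSimpleServer from the question, with the manual loop replaced by serve_forever():

server = ThreadingSimpleServer(('', 8000), Handler)
try:
    # serve_forever() dispatches each incoming request to its own thread
    server.serve_forever()
except KeyboardInterrupt:
    print("Finished")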
Hey, so I have been trying to make a script that reads the number of followers someone has and then sends that number back to the client that requested it. I have absolutely no idea how HTTP works or how to properly format a variable to go across it. Whenever I make a request I get a bad response error. I know what is causing it, the variable and how Python formats it, but how would I send this over HTTP? Any help? (Also, this server won't have very much traffic at all.)
import selenium
from selenium import webdriver
import time
import http
import http.server
import socketserver
from http.server import HTTPServer, BaseHTTPRequestHandler
import json

driver = webdriver.Chrome()

class requestHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header('content-type', 'text/html')
        self.end_headers
        print(self.path[1:])
        driver.get('https://www.tiktok.com/@%s?lang=en' % self.path[1:])
        FOLLOWERS = driver.find_element_by_xpath('//*[@id="main"]/div[2]/div[2]/div/header/h2[1]/div[2]/strong').text
        driver.close
        print(str(FOLLOWERS))
        self.wfile.write((str(FOLLOWERS).encode()))

def main():
    PORT = 8000
    server = HTTPServer(('', PORT), requestHandler)
    print('Server running on port %s' % PORT)
    server.serve_forever()

if __name__ == '__main__':
    main()
You forgot to actually call the function when ending the headers.
self.end_headers()
is what you want (the round brackets are missing); otherwise, Chrome won't recognize this as a valid HTTP response.
Interestingly, the code works as-is when using Firefox.
I can't figure out how to make this small HTTP server reply to the GET request and play a sound in parallel.
The current code does not close the GET request until the sound winsound.PlaySound("SystemExit", winsound.SND_ALIAS) finishes playing.
What I need is for the sound to be asynchronous to the request, so that the GET request ends as soon as possible while the sound keeps playing.
#!/usr/bin/env python3
from http.server import HTTPServer, SimpleHTTPRequestHandler, test
import socketserver
from urllib.parse import urlparse
from urllib.parse import parse_qs
import sys
import winsound

class MyHttpRequestHandler(SimpleHTTPRequestHandler):
    try:
        def do_GET(self):
            # Send a '200 OK' response
            self.send_response(200)
            # Set the header
            self.send_header("Content-type", "text/html")
            # Whenever you use 'send_header', you also have to call 'end_headers'
            self.end_headers()
            html = "ok"
            # Write the HTML contents with UTF-8
            self.wfile.write(bytes(html, "utf8"))
            winsound.PlaySound("SystemExit", winsound.SND_ALIAS)
            return
    except Exception as e:
        print(str(e))

# Create an object of the above class
handler_object = MyHttpRequestHandler

PORT = 8000
my_server = socketserver.TCPServer(("", PORT), handler_object)

# Start the server
my_server.serve_forever()
Use a flag to run it async:
winsound.PlaySound("SystemExit", winsound.SND_ALIAS|winsound.SND_ASYNC)
If you wish to have tighter control, use concurrent.futures.ThreadPoolExecutor() to run it in a different thread:
from concurrent.futures import ThreadPoolExecutor
from http.server import SimpleHTTPRequestHandler
import winsound

pool = ThreadPoolExecutor()

class MyHttpRequestHandler(SimpleHTTPRequestHandler):
    try:
        def do_GET(self):
            # Send a '200 OK' response
            self.send_response(200)
            # Set the header
            self.send_header("Content-type", "text/html")
            # Whenever you use 'send_header', you also have to call 'end_headers'
            self.end_headers()
            html = "ok"
            # Write the HTML contents with UTF-8
            self.wfile.write(bytes(html, "utf8"))
            # Submit the blocking call to the pool so the request can return
            pool.submit(winsound.PlaySound, "SystemExit", winsound.SND_ALIAS)
            return
    except Exception as e:
        print(str(e))
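To run it, the same bootstrap as in the question should work; this is just a sketch that assumes the serve_forever() setup from the original snippet:

import socketserver

PORT = 8000
my_server = socketserver.TCPServer(("", PORT), MyHttpRequestHandler)
my_server.serve_forever()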
BACKGROUND
Basically, this is a question about execution order in Python. In your code above, for the request to finish, the handler has to execute every task:
- sending the response
- setting the headers
- playing the music
You only return at the end of the handler, and a single thread works through these tasks one by one, so the request finishes only after all of them are done.
SOLUTION
As you suspected in the question, you are right: to play music on the server during a request, you have to run it as an asynchronous task and let the GET request return its result immediately. For example, by using asyncio (it's a very handy library, so check it out):
import asyncio
import socketserver
from http.server import SimpleHTTPRequestHandler
import winsound

class MyHttpRequestHandler(SimpleHTTPRequestHandler):
    try:
        def do_GET(self):
            self.send_response(200)
            self.end_headers()  # required before writing the body
            self.wfile.write(bytes("Ok", "utf8"))
            # Shared queue of background tasks
            asyncio.get_event_loop().run_until_complete(
                MyHttpRequestHandler.play_music()
            )
    except Exception as e:
        print(str(e))

    @staticmethod
    async def play_music():
        try:
            print('Playing sound!')
            winsound.PlaySound("SystemExit", winsound.SND_ALIAS)
        # Maybe you want to add error handling
        finally:
            pass

# Create an object of the above class
handler = MyHttpRequestHandler

server = socketserver.TCPServer(("", 8000), handler)

# Start the server
server.serve_forever()
I'm writing a small web server in Python, using BaseHTTPServer and a custom subclass of BaseHTTPServer.BaseHTTPRequestHandler. Is it possible to make this listen on more than one port?
What I'm doing now:
class MyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_GET(self):
        [...]

class ThreadingHTTPServer(ThreadingMixIn, HTTPServer):
    pass

server = ThreadingHTTPServer(('localhost', 80), MyRequestHandler)
server.serve_forever()
Sure; just start two different servers on two different ports in two different threads that each use the same handler. Here's a complete, working example that I just wrote and tested. If you run this code then you'll be able to get a Hello World webpage at both http://localhost:1111/ and http://localhost:2222/
from threading import Thread
from SocketServer import ThreadingMixIn
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()
        self.wfile.write("Hello World!")

class ThreadingHTTPServer(ThreadingMixIn, HTTPServer):
    daemon_threads = True

def serve_on_port(port):
    server = ThreadingHTTPServer(("localhost", port), Handler)
    server.serve_forever()

Thread(target=serve_on_port, args=[1111]).start()
serve_on_port(2222)
Update:
This also works with Python 3, but three lines need to be slightly changed:
from socketserver import ThreadingMixIn
from http.server import HTTPServer, BaseHTTPRequestHandler
and
self.wfile.write(bytes("Hello World!", "utf-8"))
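Putting the three changes together, a full Python 3 version of the same example would look like this (a direct merge of the update into the listing above):

from threading import Thread
from socketserver import ThreadingMixIn
from http.server import HTTPServer, BaseHTTPRequestHandler

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()
        self.wfile.write(bytes("Hello World!", "utf-8"))

class ThreadingHTTPServer(ThreadingMixIn, HTTPServer):
    daemon_threads = True

def serve_on_port(port):
    server = ThreadingHTTPServer(("localhost", port), Handler)
    server.serve_forever()

# Serve port 1111 on a background thread and port 2222 on the main thread
Thread(target=serve_on_port, args=[1111]).start()
serve_on_port(2222)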
Not easily. You could have two ThreadingHTTPServer instances and write your own serve_forever() function (don't worry, it's not a complicated function).
The existing function:
def serve_forever(self, poll_interval=0.5):
    """Handle one request at a time until shutdown.

    Polls for shutdown every poll_interval seconds. Ignores
    self.timeout. If you need to do periodic tasks, do them in
    another thread.
    """
    self.__serving = True
    self.__is_shut_down.clear()
    while self.__serving:
        # XXX: Consider using another file descriptor or
        # connecting to the socket to wake this up instead of
        # polling. Polling reduces our responsiveness to a
        # shutdown request and wastes cpu at all other times.
        r, w, e = select.select([self], [], [], poll_interval)
        if r:
            self._handle_request_noblock()
    self.__is_shut_down.set()
So our replacement would be something like:
import select

def serve_forever(server1, server2):
    while True:
        # Block until at least one of the listening sockets is readable
        # (no timeout, so the loop doesn't spin while idle)
        r, w, e = select.select([server1, server2], [], [])
        if server1 in r:
            server1.handle_request()
        if server2 in r:
            server2.handle_request()
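Wiring it up might look like this (a sketch: MyRequestHandler is the handler class from the question, and the second port, 8080, is an arbitrary choice):

server1 = ThreadingHTTPServer(('localhost', 80), MyRequestHandler)
server2 = ThreadingHTTPServer(('localhost', 8080), MyRequestHandler)
serve_forever(server1, server2)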
I would say that threading for something this simple is overkill. You're better off using some form of asynchronous programming.
Here is an example using Twisted:
from twisted.internet import reactor
from twisted.web import resource, server

class MyResource(resource.Resource):
    isLeaf = True
    def render_GET(self, request):
        return 'gotten'

site = server.Site(MyResource())
reactor.listenTCP(8000, site)
reactor.listenTCP(8001, site)
reactor.run()
I also think it looks a lot cleaner to have each port handled in the same way, instead of having the main thread handle one port and an additional thread handle the other. Arguably that can be fixed in the thread example, but then you're using three threads.
I need to create a multithreaded web server in Python 3, where each request gets a new thread. I followed a basic example from a blog, but the server always blocks: a sleep called in one thread blocks the other threads. Can someone help me with this?
Here is my code:
import time
from http.server import HTTPServer, BaseHTTPRequestHandler
from socketserver import ThreadingMixIn
import threading

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        print('start-->')
        time.sleep(5)
        print('end-->')
        self.wfile.write("Test".encode())

class ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
    pass

def run():
    server = ThreadingSimpleServer(('0.0.0.0', 1234), Handler)
    server.serve_forever()

if __name__ == '__main__':
    run()
My problem is that if I send 2 requests to the server, the second one starts executing only after the first one is done. I need the sleep to be on a thread of its own, without affecting the other threads. I will be grateful for any help.
I have some integration-testing code that spawns an HTTP server in a different process to call against. This server could potentially get polluted by activity, so I'd like the ability to start and stop new instances of it on demand.
This unfortunately isn't working: I am running into a situation where the port my server was running on is still locked after my process exits (meaning that if I run the test two times quickly, it fails the second time because the port is locked).
I've tried using atexit.register to bind the shutdown method, and it's not working either.
Here's the code for the server:
from BaseHTTPServer import BaseHTTPRequestHandler
import SocketServer
import atexit

class RestHTTPRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        if self.path == '/sanitycheck':
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()
            self.wfile.write("{ 'text': 'You are sane.' }")
        else:
            self.wfile.write(self.path)

def kill_server(httpd):
    open("/tmp/log", "w").write("KILLING")
    httpd.shutdown()

def start_simple_server(port):
    httpd = SocketServer.TCPServer(("", port), RestHTTPRequestHandler)
    atexit.register(kill_server, httpd)
    httpd.serve_forever()
    return httpd
Nothing ever gets written to /tmp/log, which makes me think the atexit hook isn't getting called.
Here's how I instantiate the server:
from multiprocessing import Process

p = Process(target=start_simple_server, args=(port,))
p.start()
And then, when I'm done and want to terminate it, I just call:
p.terminate()
That does kill the process, and should (to my understanding) trigger the atexit call, but it's not happening :(
Any thoughts?
Python's atexit hooks don't run when you terminate a process:
>>> import atexit
>>> def hook():
...     print "hook ran"
...
>>> atexit.register(hook)
<function hook at 0x100414aa0>
>>>
# in another terminal: kill <python process id>
>>> Terminated
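If you do want the hook to fire on terminate(), one workaround (a sketch, not part of the original answer, and Unix-only) is to install a SIGTERM handler that raises SystemExit: terminate() sends SIGTERM, and a normal SystemExit exit does run atexit hooks.

import atexit
import signal
import sys

def hook():
    print "hook ran"

atexit.register(hook)
# sys.exit() raises SystemExit, so the interpreter exits normally
# and the atexit hooks run instead of being skipped
signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0))
signal.pause()  # `kill <pid>` now prints "hook ran" on the way out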
I wound up taking a slightly different approach, inspired by some code from David Beazley. Server code:
from BaseHTTPServer import BaseHTTPRequestHandler
import SocketServer
import multiprocessing
import cgi
import urlparse

class StoppableHTTPServerProcess(multiprocessing.Process):
    def __init__(self, address, handler):
        multiprocessing.Process.__init__(self)
        self.exit = multiprocessing.Event()
        self.server = StoppableHTTPServer(address, handler)

    def run(self):
        # Keep serving until the exit event is set from the parent
        while not self.exit.is_set():
            self.server.handle_request()

    def shutdown(self):
        self.exit.set()

class RestHTTPRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.wfile.write(self.path)

class StoppableHTTPServer(SocketServer.TCPServer):
    # SO_REUSEADDR lets a new instance rebind the port immediately,
    # which addresses the locked-port problem from the question
    allow_reuse_address = True
    # handle_request() times out after 0.5s, so run() re-checks the exit event
    timeout = 0.5

def start_simple_server(port):
    process = StoppableHTTPServerProcess(("", port), RestHTTPRequestHandler)
    return process
Calling code:
p = start_simple_server(port)
p.start()
And when I'm done...
p.shutdown()