Python Server Logger - python

I need some help figuring out why this isn't working the way it should. The goal is to stand up a local Python server (similar to SimpleHTTPServer) that logs requests and responses to a file, so I can get a better understanding of how the browser DOM behaves. I want to be able to browse to localhost and have it react the way a web server would.
When I use it, the browser appears to hang at localhost and never gets redirected to the 'sample.html' file. Any thoughts on why this might be happening?
Code:
import os, re, socket, thread, threading;
# Settings that you may want to change:
WEBSERVER_PORT = 28876;
LOGS_FOLDER = os.path.join(os.getcwd(), 'logs');
TEST_HTML = 'sample.html';
VERBOSE_OUTPUT = True;
# Settings that you may need to change if you add new file types:
DEFAULT_MIME_TYPE = 'application/octet-stream';
REGISTERED_MIME_TYPES = {
'text/html': ['htm', 'html'],
'text/javascript': ['js'],
'image/svg+xml': ['svg'],
'image/jpeg': ['jpg'],
};
def MimeType(path):
last_dot = path.rfind('.');
if last_dot != -1:
ext = path[last_dot + 1:].lower();
for mime_type, exts in REGISTERED_MIME_TYPES.items():
if ext in exts:
return mime_type;
return DEFAULT_MIME_TYPE;
def GenerateRedirect(new_path):
return (301, 'Moved permanently', \
['Location: http://localhost:%s%s' % (WEBSERVER_PORT, new_path)], \
'text/html', '<a href="%s">%s</a>' % (new_path, new_path));
def Reply404(path, query, data):
return 404, 'Not found', 'text/plain', 'The path %s was not found' % path;
def ReplyFile(path, query, data):
path = os.path.join(os.getcwd(), path[1:].replace(os.altsep, os.sep));
if os.path.isfile(path):
try:
data = open(path, 'rb').read();
except:
print ' Cannot open file %s' % path;
return 500, 'Internal server error', 'text/plain', \
'The path %s could not be read' % path;
if VERBOSE_OUTPUT:
print ' Reply = file %s' % path;
return 200, 'OK', MimeType(path), data;
print ' Cannot find file %s' % path;
return 404, 'Not found', 'text/plain', 'The path %s was not found' % path;
def ReplyLog(path, query, data):
path = os.path.join(LOGS_FOLDER, path[len('/logs/'):].replace('/', os.sep));
try:
open(path, 'ab').write(data + '\r\n');
except:
print ' Cannot write to log file %s' % path;
return 500, 'Internal server error', 'text/plain', \
'The path %s could not be read' % path;
if VERBOSE_OUTPUT:
print ' Wrote %s bytes to log file %s' % (len(data), path);
return 200, 'OK', 'text/plain', 'Log successful';
# Replies contains entries in one of two forms:
# "regexp": ("mimetype", "body"),
# "regexp": function,
# The first form causes the server to reply with "200 OK" and the given body and
# mimetype. The second form requires "function" to accept the HTTP request path
# as an argument and return a tuple containing the HTTP return code, HTTP reason
# message, mimetype and body.
replies = [
(r'^/$', GenerateRedirect('/' + TEST_HTML)),
(r'^/logs/.*$', ReplyLog),
(r'^.*$', ReplyFile),
];
def Main():
if not os.path.isdir(LOGS_FOLDER):
try:
os.mkdir(LOGS_FOLDER);
except:
print 'Cannot create logs folder %s' % LOGS_FOLDER;
return;
server_socket = socket.socket();
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1);
server_socket.bind(('', WEBSERVER_PORT));
server_socket.listen(1);
print 'Webserver running at http://localhost:%d/' % WEBSERVER_PORT;
print;
thread_counter = 1;
while 1:
client_socket, client_address = server_socket.accept();
thread = threading.Thread(target = ConnectionThread, \
args = (client_socket, client_address));
thread.start();
thread_counter += 1;
def ConnectionThread(client_socket, client_address):
try:
print '** Connection from %s:%s opened.' % client_address;
while 1:
try:
request_headers, request_content = ReadRequest(client_socket);
if request_headers is None:
break;
response = HandleRequest(request_headers, request_content);
try:
client_socket.send(response);
except socket.error, e:
raise ConnectionError('Cannot send response');
except ConnectionError, e:
print ' ConnectionError: %s' % e;
try:
client_socket.close();
except:
pass;
break;
print '** Connection from %s:%s closed' % client_address;
except:
thread.interrupt_main();
class ConnectionError(Exception):
def __init__(self, value):
self.value = value;
def __str__(self):
return self.value;
def GetContentLength(request_headers):
for request_header in request_headers:
if request_header.find(':') != -1:
name, value = request_header.split(':', 1);
if name.strip().lower() == 'content-length':
try:
return int(value);
except ValueError:
raise ConnectionError('Bad content-length value: %s' % value);
return None;
def ReadRequest(client_socket):
request = '';
request_headers = None;
request_headers_length = None;
if VERBOSE_OUTPUT:
print '>> Accepted request, reading headers...',;
while request_headers is None:
try:
request += client_socket.recv(256);
except socket.error, e:
if VERBOSE_OUTPUT:
print;
raise ConnectionError('Connection dropped while reading request headers.');
if len(request) == 0:
if VERBOSE_OUTPUT:
print;
# raise ConnectionError('Connection closed.');
return None, None;
request_headers_length = request.find('\r\n\r\n');
if request_headers_length != -1:
request_headers = request[:request_headers_length].split('\r\n')[:-1];
# One line break is part of the headers
request_headers_length += 2;
# The other is not part of the headers or the content:
request_content_length = GetContentLength(request_headers);
if request_content_length is None:
if VERBOSE_OUTPUT:
print '\r>> Accepted request, read %d bytes of headers and ' \
'no content.' % (request_headers_length);
return request_headers, None;
request_content = request[request_headers_length + 2:];
else:
if VERBOSE_OUTPUT:
print '\r>> Accepted request, read %d bytes of headers...' % \
len(request),;
while len(request_content) < request_content_length:
if VERBOSE_OUTPUT:
print '\r>> Accepted request, read %d bytes of headers and ' \
'%d/%d bytes of content...' % (request_headers_length, \
len(request_content), request_content_length),;
read_size = request_content_length - len(request_content);
try:
request_content += client_socket.recv(read_size);
except socket.error, e:
if VERBOSE_OUTPUT:
print;
raise ConnectionError('Connection dropped while reading request content.');
if VERBOSE_OUTPUT:
print '\r>> Accepted request, read %d bytes of headers and ' \
'%d bytes of content. %s' % (request_headers_length, \
len(request_content), ' ' * len(str(request_content_length)));
return request_headers, request_content;
def HandleRequest(request_headers, request_content):
end_method = request_headers[0].find(' ');
if end_method == -1:
raise ConnectionError('Bad request header; no method recognized');
method = request_headers[0][:end_method];
end_path = request_headers[0].find(' ', end_method + 1);
if end_path == -1:
raise ConnectionError('Bad request header; no path recognized');
path = request_headers[0][end_method + 1:end_path];
query = None;
start_query = path.find('?');
if start_query != -1:
query = path[start_query:];
path = path[:start_query];
if VERBOSE_OUTPUT:
print ' method=%s, path=%s, query=%s, %s headers' % \
(method, path, query, len(request_headers));
code, reason, mime_type, body = 404, 'Not found', 'text/plain', 'Not found';
response = None;
for path_regexp, response in replies:
if re.match(path_regexp, path):
if type(response) != tuple:
response = response(path, query, request_content);
break;
assert type(response) == tuple and len(response) in [2, 4, 5], \
'Invalid response tuple %s' % repr(response);
code, reason, headers, mime_type, body = 200, 'OK', [], 'text/plain', '';
if len(response) == 2:
mime_type, body = response;
elif len(response) == 4:
code, reason, mime_type, body = response;
else:
code, reason, headers, mime_type, body = response;
response_lines = [
'HTTP/1.1 %03d %s' % (code, reason),
'Content-Type: %s' % mime_type,
'Date: Sat Aug 28 1976 09:15:00 GMT',
'Expires: Sat Aug 28 1976 09:15:00 GMT',
'Cache-Control: no-cache, must-revalidate',
'Pragma: no-cache',
'Accept-Ranges: bytes',
'Content-Length: %d' % len(body),
] + headers + [
'',
body
];
response = '\r\n'.join(response_lines);
if VERBOSE_OUTPUT:
print '<< %s (%d bytes %s)' % \
(response.split('\r\n')[0], len(response), mime_type);
return response;
if __name__ == "__main__":
Main();
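To see what the server actually sends back for the root path, a quick raw-socket check like the sketch below can help (it assumes the server above is already running on port 28876 and simply dumps the raw response):
import socket

# Connect to the local test server and issue a bare GET for "/".
s = socket.socket()
s.settimeout(5)
s.connect(('localhost', 28876))
s.sendall('GET / HTTP/1.1\r\nHost: localhost:28876\r\n\r\n'.encode('ascii'))

# Collect whatever comes back (status line, headers, body) until the
# connection goes quiet, then dump it.
response = b''
try:
    while True:
        chunk = s.recv(4096)
        if not chunk:
            break
        response += chunk
except socket.timeout:
    pass
s.close()
print(response)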
Any feedback would be greatly appreciated.
Thank you!

Related

Basic Auth is failing in AWS Lambda function

I'm calling a REST API with basic authentication in an AWS Lambda function. This is my code
import json, os, base64
from urllib import request
def lambda_handler(event, context):
retStatusCode = 0
retBody = ""
try:
url = os.environ['URL']
username = os.environ['USERNAME']
password = os.environ['PASSWORD']
requestURL = url + "/" + event['queueID'] + "/" + event['cli'];
#print ("QUEUEID IS: " + event['queueID'])
#print ("CLI IS: " + event['cli'])
#print ("URL IS: " + requestURL)
req = request.Request(requestURL, method="POST")
myStr = '%s:%s' % (username, password)
myBytes = myStr.encode("utf-8")
base64string = base64.b64encode(myBytes)
req.add_header("Authorization", "Basic %s" % base64string)
resp = request.urlopen(req)
responseJSON = json.load(resp)
retStatusCode = responseJSON["Result"]
retBody = responseJSON["Message"]
except Exception as e:
retStatusCode = 500
retBody = "An exception occurred: " + str(e)
return {
'statusCode': retStatusCode,
'body': retBody
}
However, I'm getting a "HTTP Error 401: Unauthorized" returned. If I call the API method in Postman with the same credentials, it returns data successfully, so I figure it must be something to do with the format of the header I'm adding, but just can't see what's wrong.
The problem is in this line:
req.add_header("Authorization", "Basic %s" % base64string)
From the documentation, the base64.b64encode method returns "the encoded bytes".
If you execute this in the REPL, you'll see that the resulting header looks wrong: the repr of the bytes object ends up inside the string:
>>> "Basic %s" % base64string
"Basic b'aGVsbG86d29ybGQ='"
You can read more about Python's b' syntax here.
So you need to decode the bytes back to a UTF-8 string.
req.add_header("Authorization", "Basic %s" % base64string.decode('utf-8'))
The result will look like a valid Auth header now:
>>> "Basic %s" % base64string.decode('utf-8')
'Basic aGVsbG86d29ybGQ='
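Putting the fix together, the corrected header construction might look like this (a minimal sketch; the url and credentials below are placeholders for the environment variables used above):
import base64
from urllib import request

url = "https://example.com/api/resource"   # placeholder for the URL built from env vars
username, password = "user", "secret"      # placeholders for USERNAME/PASSWORD env vars

req = request.Request(url, method="POST")
# b64encode returns bytes, so decode before interpolating into the header string.
token = base64.b64encode(('%s:%s' % (username, password)).encode('utf-8')).decode('utf-8')
req.add_header("Authorization", "Basic %s" % token)

resp = request.urlopen(req)
print(resp.getcode())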

Python socket with proxy. Authentication Error (407)

Thank you for reading.
I was able to make a request with a socket connected through a proxy, but it returns a 407; that is, the proxy authentication fails.
I set a headers['proxy-authorization'] entry, but it doesn't work. It is sent as "Basic " plus "user:pass" in base64.
def http_proxy_connect(address, proxy = None, auth = None, headers = {}):
import socket
import base64
def valid_address(addr):
""" Verify that an IP/port tuple is valid """
return isinstance(addr, (list, tuple)) and len(addr) == 2 and isinstance(addr[0], str) and isinstance(addr[1], (int, int))
if not valid_address(address):
raise ValueError('Invalid target address')
if proxy == None:
s = socket.socket()
s.connect(address)
return s, 0, {}
if not valid_address(proxy):
raise ValueError('Invalid proxy address')
headers = {
'host': address[0]
}
headers['proxy-authorization'] = 'Basic ' + auth
headers['Proxy-Authorization'] = 'Basic ' + auth
headers['Proxy-Authenticate'] = 'Basic'
s = socket.socket()
s.connect(proxy)
fp = s.makefile('wr')
fp.write('CONNECT %s:%d HTTP/1.0\r\n' % address)
fp.write('\r\n'.join('%s: %s' % (k, v) for (k, v) in headers.items()) + '\r\n\r\n')
print('\r\n'.join('%s: %s' % (k, v) for (k, v) in headers.items()) + '\r\n\r\n')
fp.flush()
statusline = fp.readline().rstrip('\r\n')
if statusline.count(' ') < 2:
fp.close()
s.close()
raise IOError('Bad response')
version, status, statusmsg = statusline.split(' ', 2)
if not version in ('HTTP/1.0', 'HTTP/1.1'):
fp.close()
s.close()
raise IOError('Unsupported HTTP version')
try:
status = int(status)
except ValueError:
fp.close()
s.close()
raise IOError('Bad response')
response_headers = {}
while True:
tl = ''
l = fp.readline().rstrip('\r\n')
if l == '':
break
if not ':' in l:
continue
k, v = l.split(':', 1)
response_headers[k.strip().lower()] = v.strip()
fp.close()
return (s, status, response_headers)
print(http_proxy_connect(('xxxxx',80), ('xxxxx',50000), 'user:pass in base64!'));
I am currently sending headers['proxy-authorization'] as "Basic " plus "user:pass" in base64.
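For what it's worth, the value after "Basic " has to be the base64 encoding of the literal user:pass string, with no space around the colon. A minimal sketch of building that header value from plain credentials (proxy_auth_value is a hypothetical helper, not part of the code above):
import base64

def proxy_auth_value(username, password):
    # Per HTTP Basic auth, base64-encode the exact "username:password" string
    # (no spaces around the colon) and prepend "Basic ".
    raw = '%s:%s' % (username, password)
    return 'Basic ' + base64.b64encode(raw.encode('utf-8')).decode('ascii')

# Example (hypothetical credentials):
# headers['Proxy-Authorization'] = proxy_auth_value('user', 'pass')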

Add time stamp to end of file name before upload to ftp

I am using the following function to upload mp4 file to ftp
def run(self):
while True:
status = "Failed"
#logger.debug("Queue size: %s" %(str(self.queue.qsize())))
(env, pfile) = self.queue.get()
logger.debug("Thread %s Received %s and %s" %(str(self.threadID), pfile, env))
metaName = "%s/%s.evs.xml" %(self.config[env]["upload_folder"], pfile)
mediaName = "%s/%s.mp4" %(self.config[env]["upload_folder"], pfile)
mediaName2 = "%s.mp4" %(pfile,)
logger.info("Thread %s - Uploading metadata to %s FTP: %s" %(str(self.threadID), env, metaName))
try:
ur = uploadFTPMP(metaName, env)
status = "Success"
except:
logger.debug("Thread %s - Uploading of metadata %s to %s failed" %(str(self.threadID), metaName, env))
uploadComplete(pfile, status, env)
if status == "Success":
logger.info("Thread %s - Sleeping to allow cms to pick up xml" %(str(self.threadID)))
time.sleep(90)
logger.info( "Thread %s - Uploading mediafile %s to %s" %(str(self.threadID), mediaName, env))
ur = uploadFTP(mediaName, env)
status = "Success"
filename = os.path.basename(mediaName)
http_client.HTTPConnection.debuglevel = 1
logging.basicConfig(level=logging.INFO)
logging.basicConfig()
logging.getLogger().setLevel(logging.INFO)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.INFO)
requests_log.propagate = True
url = 'http://www.webdev.com/web/home/testPOST'
headers = {'content-type': 'application/json'}
payload = {'mediaName': mediaName2 ,'status': 'sucsses'}
r = requests.post(url, data=json.dumps(payload), headers=headers)
r.text
r.status_code
r.connection.close()
uploadComplete(pfile, status, env)
logger.info("Thread %s - Upload completed with status %s" %(str(self.threadID), status))
self.queue.task_done()
I'm trying to modify the mp4 file name before upload by adding a time stamp to the end, so the file name becomes video-%H%M%S.mp4, and then upload it under that name.
Then, when I perform the POST request, as you can see it currently sends 2 parameters; I need to add a 3rd parameter, which is the new name with the time stamp. Any tips on how to achieve this?
Here is my ftp function also.
def uploadFTPMP(filepath, env):
global config
ftpsrv = config[env]["active"]
ftpusr = config[env]["ftpuser"]
ftppwd = config[env]["ftppass"]
filename = os.path.basename(filepath)
try:
ftp = ftplib.FTP(ftpsrv)
ftp.login(ftpusr, ftppwd)
except:
logger.error("Ftp connection error has occurred")
raise
else:
f = open(filepath, "r")
cmd = "STOR %s" %(filename)
out = ftp.storbinary(cmd, f)
f.close()
ftp.quit()
return out
You can format a datetime into the filename.
import datetime
medianame = 'somefile_{timestamp}.mp4'.format(
timestamp=datetime.datetime.now().isoformat()
)
The datetime can also be formatted as you require e.g. datetime.datetime.now().strftime('%Y-%m-%d_%H%M'). The strftime() documentation has a list of accepted formatting directives that can be used.
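Applied to the code in the question, a minimal sketch might rename the media file before the FTP upload and then send the timestamped name as an extra POST field (mediaName, mediaName2, env, url and headers come from the run() method above; 'stampedName' is a hypothetical field name, so adjust it to whatever the endpoint expects):
import os
import datetime

def timestamped_name(path, fmt='%H%M%S'):
    # Return path with a -HHMMSS timestamp inserted before the extension,
    # e.g. video.mp4 -> video-091500.mp4.
    base, ext = os.path.splitext(path)
    return '%s-%s%s' % (base, datetime.datetime.now().strftime(fmt), ext)

# In the worker thread, before the upload:
#   newMediaName = timestamped_name(mediaName)
#   os.rename(mediaName, newMediaName)
#   ur = uploadFTP(newMediaName, env)
#   payload = {'mediaName': mediaName2,
#              'status': 'success',
#              'stampedName': os.path.basename(newMediaName)}
#   r = requests.post(url, data=json.dumps(payload), headers=headers)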

converting curl command to pycurl

So the curl command I'm using is as follows:
cmd = "curl --write-out %{http_code} -X PUT -T " + self.basedir + putfile + " -# -o /dev/null " + self.uri + "/" + self.dist + "/" + putfile
I'd like to change this from invoking a system command to using pycurl. That way I can have more granular control over it and ultimately implement a progress bar for it. However, when I try to convert it to Python, the resulting script fails. Here is my attempt at a Python script:
f = open(filepath, "rb")
fs = os.path.getsize(filepath)
c = pycurl.Curl()
c.setopt(c.URL, target_url)
c.setopt(c.HTTPHEADER, ["User-Agent: Load Tool (PyCURL Load Tool)"])
c.setopt(c.PUT, 1)
c.setopt(c.READDATA, f)
c.setopt(c.INFILESIZE, int(fs))
c.setopt(c.NOSIGNAL, 1)
c.setopt(c.VERBOSE, 1)
c.body = StringIO()
c.setopt(c.WRITEFUNCTION, c.body.write)
try:
c.perform()
except:
import traceback
traceback.print_exc(file=sys.stderr)
sys.stderr.flush()
f.close()
c.close()
sys.stdout.write(".")
sys.stdout.flush()
Here's what that outputs:
* About to connect() to ************ port 8090 (#0)
* Trying 16.94.124.53... * connected
> PUT /incoming/ HTTP/1.1
Host: ***********
Accept: */*
User-Agent: Load Tool (PyCURL Load Tool)
Content-Length: 21
Expect: 100-continue
< HTTP/1.1 100 Continue
* We are completely uploaded and fine
< HTTP/1.1 500 Internal Server Error
< Content-type: text/html
* no chunk, no close, no size. Assume close to signal end
<
Thanks in advance for your help!
I've written a working upload module; you can find your answers by looking through the code below.
You can also find almost everything regarding pycurl by digging through the libcurl examples and docs.
'''
Created on Oct 22, 2013
#author: me
'''
import pycurl
import os
import wx
import sys
import hashlib
from cStringIO import StringIO
def get_file_hash(full_filename):
BLOCKSIZE = 65536
hasher = hashlib.md5()
with open(full_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
return hasher.hexdigest()
class FtpUpload(object):
def __init__(self, server, username, password, **items):
self.server = server
self.username = username
self.password = password
self.gauge = items.get("gauge")
self.sb_speed = items.get("sb_speed")
self.upload_file_size = items.get("upload_file_size")
self.upload_file_speed = items.get("upload_file_speed")
self.filesize = 0
self.ftp_filehash = '0'
def sizeToNiceString(self, byteCount):
for (cutoff, label) in [(1024*1024*1024, "GB"), (1024*1024, "MB"), (1024, "KB")]:
if byteCount >= cutoff:
return "%.2f %s" % (byteCount * 1.0 / cutoff, label)
if byteCount == 1:
return "1 byte"
else:
return "%d bytes" % byteCount
def initRange(self, filesize):
self.filesize = filesize
self.gauge.SetRange(filesize)
def updateValue(self, upload_d):
upload_d_int = int(upload_d)
self.gauge.SetValue(upload_d_int)
upload_d_str = self.sizeToNiceString(upload_d)
upload_percent = int((upload_d*100)/self.filesize)
upload_d_status = "{0}/{1} ({2}%)".format(upload_d_str, self.sizeToNiceString(self.filesize), upload_percent)
self.sb_speed.SetStatusText(upload_d_status, 1)
self.upload_file_size.SetLabel(upload_d_status)
self.upload_file_speed.SetLabel(upload_d_str)
def progress(self, download_t, download_d, upload_t, upload_d):
self.updateValue(upload_d)
def test(self, debug_type, debug_msg):
if len(debug_msg) < 300:
print "debug(%d): %s" % (debug_type, debug_msg.strip())
def ftp_file_hash(self, buf):
sys.stderr.write("{0:.<20} : {1}\n".format('FTP RAW ', buf.strip()))
ftp_filehash = dict()
item = buf.strip().split('\n')[0]
ext = item.split('.')[1]
if len(ext) > 3:
ftp_filename = item[:-33]
ftp_filehash = item[-32:]
self.ftp_filehash = ftp_filehash
def get_ftp_file_hash(self, filename):
c = pycurl.Curl()
list_file_hash = 'LIST -1 ' + filename + "_*"
sys.stderr.write("{0:.<20} : {1} \n".format('FTP command ', list_file_hash))
c.setopt(pycurl.URL, self.server)
c.setopt(pycurl.USERNAME, self.username)
c.setopt(pycurl.PASSWORD, self.password)
c.setopt(pycurl.VERBOSE, False)
c.setopt(pycurl.DEBUGFUNCTION, self.test)
c.setopt(pycurl.CUSTOMREQUEST, list_file_hash)
c.setopt(pycurl.WRITEFUNCTION, self.ftp_file_hash)
c.perform()
c.close()
def delete_ftp_hash_file(self, ftp_hash_file_old):
c = pycurl.Curl()
delete_hash_file = 'DELE ' + ftp_hash_file_old
sys.stderr.write("{0:.<20} : {1} \n".format('FTP command ', delete_hash_file))
c.setopt(pycurl.URL, self.server)
c.setopt(pycurl.USERNAME, self.username)
c.setopt(pycurl.PASSWORD, self.password)
c.setopt(pycurl.VERBOSE, False)
c.setopt(pycurl.DEBUGFUNCTION, self.test)
c.setopt(pycurl.CUSTOMREQUEST, delete_hash_file)
try:
c.perform()
except Exception as e:
print e
c.close()
def upload(self, full_filename, filesize):
self.initRange(filesize)
filename = os.path.basename(full_filename)
sys.stderr.write("filename: %s\n" % full_filename)
c = pycurl.Curl()
c.setopt(pycurl.USERNAME, self.username)
c.setopt(pycurl.PASSWORD, self.password)
c.setopt(pycurl.VERBOSE, False)
c.setopt(pycurl.DEBUGFUNCTION, self.test)
c.setopt(pycurl.NOBODY, True)
c.setopt(pycurl.HEADER, False)
ftp_file_path = os.path.join(self.server, os.path.basename(full_filename))
file_hash = get_file_hash(full_filename)
ftp_hash_file = ftp_file_path + "_%s" % file_hash
# Getting filesize if exist on server.
try:
c.setopt(pycurl.URL, ftp_file_path)
c.perform()
filesize_offset = int(c.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD))
except Exception as error_msg:
print error_msg
wx.MessageBox(str(error_msg), 'Connection error!',
wx.OK | wx.ICON_ERROR)
# Exit upload function.
return True
ftp_file_append = True
# Get ftp file hash.
self.get_ftp_file_hash(filename)
offset = filesize_offset == -1 and '0' or filesize_offset
sys.stderr.write("L_file hash : {0:.<60}: {1:<40}\n".format(filename, file_hash))
sys.stderr.write("F_file hash : {0:.<60}: {1:<40}\n".format(filename, self.ftp_filehash))
sys.stderr.write("{0:15} : {1:.>15}\n".format('filesize:', filesize))
sys.stderr.write("{0:15} : {1:.>15}\n".format('ftp_filesize', offset))
sys.stderr.write("{0:15} : {1:.>15}\n".format('to upload:', filesize - int(offset)))
# File not exist on FTP server.
if filesize_offset == -1:
# file not exist: uploading from offset zero.
ftp_file_append = False
filesize_offset = 0
# Local and FTP file size and files MD5 are the same.
elif filesize_offset == self.filesize and file_hash == self.ftp_filehash:
sys.stderr.write("--- File exist on server! ---\n\n")
self.upload_file_speed.SetLabel("File exist on server!")
self.sb_speed.SetStatusText("File exist on server!", 1)
# Check next filename.
return False
# Ftp file and local file different data.
elif file_hash != self.ftp_filehash:
ftp_file_append = False
filesize_offset = 0
ftp_hash_file_old = filename + "_" + self.ftp_filehash
# delete old hash file.
self.delete_ftp_hash_file(ftp_hash_file_old)
c.setopt(pycurl.FTPAPPEND, ftp_file_append)
c.setopt(pycurl.UPLOAD, True)
c.setopt(pycurl.PROGRESSFUNCTION, self.progress)
with open('filehash.txt', 'w') as f:
f.write(file_hash)
for item in ("filehash.txt", full_filename):
# dont show progress by default.
noprogress = True
# upload ftp_hash_file first.
ftp_url = ftp_hash_file
with open(item, "rb") as f:
# chages ftp_url and show progress values, add filesize_offset.
if item != "filehash.txt":
f.seek(filesize_offset)
noprogress = False
ftp_url = ftp_file_path
c.setopt(pycurl.URL, ftp_url)
c.setopt(pycurl.NOPROGRESS, noprogress)
c.setopt(pycurl.READFUNCTION, f.read)
try:
c.perform()
if item != "filehash.txt":
sys.stderr.write("{0:15} : {1:.>15}\n\n".format("size uploaded", int(c.getinfo(pycurl.SIZE_UPLOAD))))
except Exception as error_msg:
print error_msg
wx.MessageBox(str(error_msg), 'Connection error!',
wx.OK | wx.ICON_ERROR)
# Exit upload function.
return True
self.ftp_filehash = '0'
c.close()
if __name__ == '__main__':
pass
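For the original HTTP PUT use case specifically, a stripped-down sketch might look like the following (upload_put is a hypothetical helper; it assumes target_url already includes the destination file name, since curl -T appends the local file name when the URL ends in /, and the progress callback just prints counters):
import os
import pycurl

def upload_put(filepath, target_url):
    # PUT a local file to target_url, roughly what
    # "curl --write-out %{http_code} -X PUT -T file url" does.
    filesize = os.path.getsize(filepath)
    status = 0

    def progress(download_t, download_d, upload_t, upload_d):
        # Called periodically by libcurl; enough to drive a progress bar.
        if upload_t:
            print('uploaded %d/%d bytes' % (upload_d, upload_t))

    c = pycurl.Curl()
    with open(filepath, 'rb') as f:
        c.setopt(pycurl.URL, target_url)
        c.setopt(pycurl.UPLOAD, 1)
        c.setopt(pycurl.READDATA, f)
        c.setopt(pycurl.INFILESIZE, filesize)
        c.setopt(pycurl.NOPROGRESS, 0)
        c.setopt(pycurl.PROGRESSFUNCTION, progress)
        c.perform()
        status = c.getinfo(pycurl.RESPONSE_CODE)
    c.close()
    return status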

uploading file using urllib2

I'm new to Python and I'm writing code to upload a file using urllib2, but I can't make it work.
Here's the code:
class Get(object):
handlers = list()
def __init__(self,url):
self.url = url
self.request = urllib2.Request(url)
self.request.add_header('User-Agent',"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13")
def auth(self,username,password):
pass_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
pass_mgr.add_password(None, self.url, username, password)
handler = urllib2.HTTPBasicAuthHandler(pass_mgr)
self._add_handler(handler)
def perform(self):
try:
opener = self._opener()
res = opener.open(self.request)
to_return = {
'code': res.code,
'contents': res.read(),
'url': res.geturl(),
'headers': dict(res.info())
}
except urllib2.URLError as e:
if hasattr(e, 'reason'):
print 'Error accessing the server.'
print 'Reason: ', e.reason
elif hasattr(e, 'code'):
print 'The server couldn\'t fulfill the request.'
print e
else:
return to_return
def _add_handler(self,handler):
self.handlers.append(handler)
def _opener(self):
return urllib2.build_opener(*self.handlers)
class Multipart(object):
def __init__(self,url):
super(Multipart,self).__init__(url)
self.data = list()
def perform(self):
b = choose_boundary()
tmp = "".join(map(lambda x: "--%s \r\n %s" % (b,x), self.data ))
tmp += "--%s--\r\n" % b
self.request.add_data(tmp)
content_type = 'multipart/form-data; boundary=%s' % b
self.request.add_unredirected_header('Content-Type', content_type)
#self.request.add_header("Content-Type","multipart/form-data, boundary=%s" % b)
return super(HTTP.Multipart,self).perform()
def set_data(self,data,file = None):
for i in data:
if file:
self.data.append(self._encode_file(i,**data[i]))
else:
self.data.append(self._encode_text(i,data[i]))
def _encode_text(self,key,value):
return "Content-Disposition: form-data; name=\"%s\"\r\n\r\n%s\r\n" % (key, value)
def _encode_file(self,key,path,filename=None,mime_type=None):
if not exists(path):
raise RuntimeError('%s not found' % path)
fname = filename or basename(path)
mime = mime_type or guess_type(path)[0] or 'application/octet-stream'
size = getsize(path)
content = ""
with open(path,'rb') as fobj:
content = fobj.read(size)
converted_text = "Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"\r\n" % (key,fname)
converted_text += "Content-Transfer-Encoding: binary\r\n"
converted_text += "Content-Type: %s \r\n" % mime
converted_text += "Content-Length: %s \r\n" % size
converted_text += "\r\n %s \r\n" % content
return converted_text
Thanks to MultipartPostHandler, everything is working fine now:
class Post(Get): # inherits the Get class above
def __init__(self,url,data,multipart=False):
super(HTTP.Post,self).__init__(url)
if multipart:
from MultipartPostHandler import MultipartPostHandler
self._add_handler(MultipartPostHandler)
self.request.add_data(data)
else:
self.request.add_data(urlencode(data))
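For reference, usage might look something like this (a minimal sketch, assuming Get and Post are accessible as written, that the HTTP. prefix in the snippet means they live in a container class, and that MultipartPostHandler accepts a dict whose file values are open file objects; the URL and field names are placeholders):
# Hypothetical usage of the Post class above.
data = {
    'description': 'test upload',            # plain form field
    'file': open('/tmp/example.bin', 'rb'),  # file field handled by MultipartPostHandler
}
post = Post('http://example.com/upload', data, multipart=True)
result = post.perform()
if result:
    print result['code'], result['url']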
