python simple wsgi file upload script - What is wrong? - python

import os, cgi
#self_hosting script
tags = """<form enctype="multipart/form-data" action="save_file.py" method="post">
<p>File: <input type="file" name="file"></p>
<p><input type="submit" value="Upload"></p>
</form>"""
def Request(environ, start_response):
# use cgi module to read data
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ, keep_blank_values=True)
try:
fileitem = form['file']
except KeyError:
fileitem = None
if fileitem and fileitem.file:
fn = os.path.basename(fileitem.filename)
with open(fn, 'wb') as f:
data = fileitem.file.read(1024)
while data:
f.write(data)
data = fileitem.file.read(1024)
message = 'The file "' + fn + '" was uploaded successfully'
else :
message = 'please upload a file.'
start_response('200 OK', [('Content-type','text/html')])
return [message + "<br / >" + tags]
Above is my python wsgi script that receives a file and writes it to the disk. However, upon executing (with a file selected):
Internal Server Error
An error occurred processing this request.
Request handler failed
Traceback (most recent call last):
File "C:\Python26\Http\Isapi.py", line 110, in Request
return Handler(Name)
File "C:\Python26\Http\Isapi.py", line 93, in
"/apps/py/" : lambda P: RunWSGIWrapper(P),
File "C:\Python26\Http\Isapi.py", line 86, in RunWSGIWrapper
return RunWSGI(ScriptHandlers[Path])
File "C:\Python26\Http\WSGI.py", line 155, in RunWSGI
Result = Application(Environ, StartResponse)
File "\\?\C:\Python26\html\save_file.py", line 13, in Request
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ, keep_blank_values=True)
File "C:\Python26\Lib\cgi.py", line 496, in __init__
self.read_multi(environ, keep_blank_values, strict_parsing)
File "C:\Python26\Lib\cgi.py", line 620, in read_multi
environ, keep_blank_values, strict_parsing)
File "C:\Python26\Lib\cgi.py", line 498, in __init__
self.read_single()
File "C:\Python26\Lib\cgi.py", line 635, in read_single
self.read_lines()
File "C:\Python26\Lib\cgi.py", line 657, in read_lines
self.read_lines_to_outerboundary()
File "C:\Python26\Lib\cgi.py", line 685, in read_lines_to_outerboundary
line = self.fp.readline(1<<16)
AttributeError: 'module' object has no attribute 'readline'
Being pretty daft at wsgi and cgi module I have no idea to progress at this moment. any clues?

environ['wsgi.input'] is a stream like object. You need to cache it firstly to file like object, eg: tempfile.TemporaryFile or StringIO (io.BytesIO in python3):
from tempfile import TemporaryFile
import os, cgi
def read(environ):
length = int(environ.get('CONTENT_LENGTH', 0))
stream = environ['wsgi.input']
body = TemporaryFile(mode='w+b')
while length > 0:
part = stream.read(min(length, 1024*200)) # 200KB buffer size
if not part: break
body.write(part)
length -= len(part)
body.seek(0)
environ['wsgi.input'] = body
return body
def Request(environ, start_response):
# use cgi module to read data
body = read(environ)
form = cgi.FieldStorage(fp=body, environ=environ, keep_blank_values=True)
# rest of your code
For safety reason consider to mask environ value which you pass to FieldStorage

Self - answer :
Sorry I didn't said that This was PyISAPIe-specific problem. the file-like object environ['wsgi.input'] does not have readline() method like other environ varibles in different wsgi implementations would do.
the (inefficient workaround) is saving everything from environ['wsgi.input'] into a tempfile and pass it to FieldStorage.
So :
import tempfile, cgi
def some_wsgi_app(environ, start_response):
temp_file = tempfile.TemporaryFile()
temp_file.write(environ['wsgi.input'].read()) # or use buffered read()
temp_file.seek(0)
form = cgi.FieldStorage(fp=temp_file, environ=environ, keep_blank_values=True)
# do_something #
temp_file.close() #close and destroy temp file
# ... start_response, return ... #
However above example will fail to operate properly if uploaded data from user is too big.

Related

Microsoft Azure Timer Function not uploading files to blob storage

I am trying to do a timer trigger whereby every 5 minutes, the function will upload a file into my blob storage.
When I run my code locally, it works, but it fails when it is deployed on Azure. Any help will be appreciated.
Main Method
device_client = IoTHubDeviceClient.create_from_connection_string(CONNECTION_STRING)
PATH_TO_FILE = wget.download("link-of-something", out=os.getcwd())
device_client.connect()
blob_name = os.path.basename(PATH_TO_FILE)
storage_info = device_client.get_storage_info_for_blob(blob_name)
store_blob(storage_info, PATH_TO_FILE)
device_client.shutdown()
Helper method
def store_blob(blob_info, file_name):
try:
sas_url = "https://{}/{}/{}{}".format(
blob_info["hostName"],
blob_info["containerName"],
blob_info["blobName"],
blob_info["sasToken"]
)
print("\nUploading file: {} to Azure Storage as blob: {} in container {}\n".format(file_name, blob_info["blobName"], blob_info["containerName"]))
# Upload the specified file
with BlobClient.from_blob_url(sas_url) as blob_client:
with open(file_name, "rb") as f:
result = blob_client.upload_blob(f, overwrite=True)
return (True, result)
except FileNotFoundError as ex:
# catch file not found and add an HTTP status code to return in notification to IoT Hub
ex.status_code = 404
return (False, ex)
except AzureError as ex:
# catch Azure errors that might result from the upload operation
return (False, ex)
This is the error log (Edited)
Result: Failure Exception: OSError: [Errno 30] Read-only file system: './nasa_graphics_manual_nhb_1430-2_jan_1976.pdfjo243l48.tmp' Stack: File "/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py", line 407, in _handle__invocation_request call_result = await self._loop.run_in_executor( File "/usr/local/lib/python3.9/concurrent/futures/thread.py", line 58, in run result = self.fn(*self.args, **self.kwargs) File "/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py", line 649, in _run_sync_func return ExtensionManager.get_sync_invocation_wrapper(context, File "/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/extension.py", line 215, in _raw_invocation_wrapper result = function(**args) File "/home/site/wwwroot/azure-function-timer/__init__.py", line 134, in main PATH_TO_FILE = wget.download("https://www.nasa.gov/sites/default/files/atoms/files/nasa_graphics_manual_nhb_1430-2_jan_1976.pdf", out=os.getcwd()) # wget to get the filename and path File "/home/site/wwwroot/.python_packages/lib/site-packages/wget.py", line 506, in download (fd, tmpfile) = tempfile.mkstemp(".tmp", prefix=prefix, dir=".") File "/usr/local/lib/python3.9/tempfile.py", line 336, in mkstemp return _mkstemp_inner(dir, prefix, suffix, flags, output_type) File "/usr/local/lib/python3.9/tempfile.py", line 255, in _mkstemp_inner fd = _os.open(file, flags, 0o600)
What you can do is containerize the function using docker and inside the container the place the file so that you can read the file later.
Because if you don't containerize the function, only the function's code will be deployed and not the file.
Refer this documentation for Indepth explanation.

Python ftplib.error_perm 550: No such file or directory?

I've written a Python script that is part of my attempt to automate daily ftp transfers from my server. I've tested the script with a number of files and file types (html, mp3, png, jpg, etc.) and everything seems to work out fine so far.
However, when I try to download a simple text file, 'file.txt' (9 kb), the download fails, although I account for text files and switch from binary to text mode for the transfer. The following exception is thrown by ftplib:
ftplib.error_perm: 550 file.txt: No such file or directory
Here's my script:
from ftplib import FTP_TLS, error_perm
import os
def open_connection(server, user, pwd, work_dir=None):
global ftps
try:
ftps = FTP_TLS(host=server)
ftps.login(user=user, passwd=pwd)
ftps.prot_p() # switch to secure data connection
if work_dir != None:
ftps.cwd(work_dir)
else:
pass
except:
pass
def download_file(remote_path, local_path):
remote_file = os.path.basename(remote_path)
local_file_path = os.path.join(local_path, remote_file)
# differentiate between text and binary files
file_type, encoding = guess_type_and_encoding(remote_file)
# possibly needs a permission exception catch
if file_type.split("/")[0] == "text" and encoding == None:
# use text mode for transfer
local_file = open(local_file_path, 'w')
def callback(line): local_file.write(line + "\n")
ftps.retrlines("RETR " + remote_file, callback)
local_file.close()
else:
# use binary mode for transfer
local_file = open(local_file_path, 'wb')
ftps.retrbinary("RETR " + remote_file, local_file.write)
local_file.close()
return
def guess_type_and_encoding(filename):
from mimetypes import guess_type, add_type
add_type('text/x-python-win', '.pyw') # not in tables
mimetype, encoding = guess_type(filename, False) # allow extras
mimetype = mimetype or "?/?" # type unknown
return mimetype, encoding
open_connection(server, user, pwd, work_dir)
download_file("/files/dir/file.txt", "/Users/username/Desktop")
ftps.close()
I don't get why the error is raised!? The arguments 'remote_path' and 'local_path' are correctly provided. Both paths exist! 'file.txt' exists on the server under /files/dir and /Users/username/Desktop points to my desktop on OS X.
Here's the detailed ftplib error:
Traceback (most recent call last):
File "ftp2.py", line 138, in <module>
download_file("/files/dir/file.txt", "/Users/username/Desktop")
File "ftp2.py", line 93, in download_file
ftps.retrlines("RETR " + remote_file, callback)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/ftplib.py", line 735, in retrlines
conn = self.transfercmd(cmd)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/ftplib.py", line 376, in transfercmd
return self.ntransfercmd(cmd, rest)[0]
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/ftplib.py", line 710, in ntransfercmd
conn, size = FTP.ntransfercmd(self, cmd, rest)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/ftplib.py", line 339, in ntransfercmd
resp = self.sendcmd(cmd)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/ftplib.py", line 249, in sendcmd
return self.getresp()
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/ftplib.py", line 224, in getresp
raise error_perm, resp
ftplib.error_perm: 550 file.txt: No such file or directory
Any help is greatly appreciated.
Thanks. :)
Try to
replace remote_file
in ftps.retrlines("RETR " + remote_file, callback)
with remote_path.

python requests api not posting in-memory zipped file

question: How can I get this to work
I'm trying to use the python requests api to send a zipped file to a server. I saw this method in the docs:
r = requests.post(url, files=open('foo.png', 'rb'))
but the difference between what I'm doing, is that the zipped file that I have is in memory, there's just a python object, no physical zipped version of the file is created:
I'm using the zipfile api, and this is how I'm creating my zip file:
inMemoryOutputFile = StringIO()
outFile = zipfile.ZipFile(inMemoryOutputFile, "w",
compression=zipfile.ZIP_DEFLATED)
and trying the following (after writing to the zip file):
r = requests.post(url, outFile)
however its not working, looks like the object is not being recognized as a parameter. here's the stack trace
Traceback (most recent call last): File
"/Users/abdulahmad/Desktop/upload-script-ve/bin/cogs", line 11, in
<module>
sys.exit(main()) File "/Users/abdulahmad/Desktop/upload-script-ve/lib/python2.7/site-packages/cogs/run.py",
line 396, in main
return run(sys.argv) File "/Users/abdulahmad/Desktop/upload-script-ve/lib/python2.7/site-packages/cogs/run.py",
line 384, in run
return instance() File "/Users/abdulahmad/Desktop/upload-script-ve//src/ctl.py",
line 53, in __call__
handler = uploader(self.url, self.file) File "/Users/abdulahmad/Desktop/upload-script-ve//src/uploader.py",
line 24, in __call__
response = self.session.post(url, files=payload)
#this is where I'm adding the file (the payload)
File "/Users/abdulahmad/Desktop/upload-script-ve/lib/python2.7/site-packages/requests/sessions.py",
line 511, in post
return self.request('POST', url, data=data, json=json, **kwargs) File
"/Users/abdulahmad/Desktop/upload-script-ve/lib/python2.7/site-packages/requests/sessions.py",
line 454, in request
prep = self.prepare_request(req) File "/Users/abdulahmad/Desktop/upload-script-ve/lib/python2.7/site-packages/requests/sessions.py",
line 388, in prepare_request
hooks=merge_hooks(request.hooks, self.hooks), File "/Users/abdulahmad/Desktop/upload-script-ve/lib/python2.7/site-packages/requests/models.py",
line 296, in prepare
self.prepare_body(data, files, json) File "/Users/abdulahmad/Desktop/upload-script-ve/lib/python2.7/site-packages/requests/models.py",
line 447, in prepare_body
(body, content_type) = self._encode_files(files, data) File "/Users/abdulahmad/Desktop/upload-script-ve/lib/python2.7/site-packages/requests/models.py",
line 150, in _encode_files
fdata = fp.read() TypeError: read() takes at least 2 arguments (1 given)
actual code:
inMemoryOutputFile = StringIO()
parentDir, dirToZip = os.path.split(dirPath)
def trimPath(path):
archivePath = path.replace(parentDir, "", 1)
if parentDir:
archivePath = archivePath.replace(os.path.sep, "", 1)
if not includeDirInZip:
archivePath = archivePath.replace(dirToZip + os.path.sep, "", 1)
return os.path.normcase(archivePath)
outFile = zipfile.ZipFile(inMemoryOutputFile, "w",
compression=zipfile.ZIP_DEFLATED)
for (archiveDirPath, dirNames, fileNames) in os.walk(dirPath):
for fileName in fileNames:
filePath = os.path.join(archiveDirPath, fileName)
outFile.write(filePath, trimPath(filePath))
if not fileNames and not dirNames:
zipInfo = zipfile.ZipInfo(trimPath(archiveDirPath) + "/")
outFile.writestr(zipInfo, "")
outFile.close()
return outFile
You need to pass the StringIO buffer to requests, not the ZipFile. ZipFile.read("somefile.txt") reads an uncompressed file from the archive, it doesn't read the compressed binary stream. That read requires 1 parameter and that's why you got the strange error message. Rewind the file before posting or the POST data will be empty.
This example shows you the workflow.
import zipfile
from cStringIO import StringIO
import requests
import logging
logging.basicConfig(level=logging.DEBUG)
buf = StringIO()
with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zippy:
zippy.write('somefile.txt')
buf.seek(0)
requests.post('http://localhost:8080',
headers = {'content-type': 'application/octet-stream'},
data=buf)

Send file from client to server using XMLRPC?

I want to write Python code to send a file from client to server. server needs to save the file sent from the client. But my code have some bugs which I cannot fix. Below is my server code:
# server.py
from SimpleXMLRPCServer import SimpleXMLRPCServer
import os
server = SimpleXMLRPCServer(('localhost', 9000))
def save_data(data):
handle = open("x123.dat", "wb")
handle.write(data)
handle.close()
server.register_function(save_data, 'save_data')
server.serve_forever()
And the client code:
# client.py
import sys, xmlrpclib
proxy = xmlrpclib.Server('http://localhost:9000')
handle = open(sys.argv[1], "rb")
proxy.save_data(handle.read())
handle.close()
But then I run my code, the client returns the following error (this is on Windows):
Traceback (most recent call last):
File "client.py", line 6, in <module> proxy.save_data(handle.read())
File "c:\python27\lib\xmlrpclib.py", line 1224, in __call__
return self.__send(self.__name, args)
File "c:\python27\lib\xmlrpclib.py", line 1575, in __request
verbose=self.__verbose
File "c:\python27\lib\xmlrpclib.py", line 1264, in request
return self.single_request(host, handler, request_body, verbose)
File "c:\python27\lib\xmlrpclib.py", line 1297, in single_request
return self.parse_response(response)
File "c:\python27\lib\xmlrpclib.py", line 1473, in parse_response
return u.close()
File "c:\python27\lib\xmlrpclib.py", line 793, in close
raise Fault(**self._stack[0])
xmlrpclib.Fault: <Fault 1: "<class 'xml.parsers.expat.ExpatError'>:not well-formed (invalid token): line 7, column 1">
I have some questions:
How to fix the above bug?
My code needs to transfer some big files sometimes. Since my method is so simple, I doubt that it is efficient for moving big data. Could anybody please suggest a better method to move big files? (Of course it is better to use XMLRPC on Python)
Server side:
def server_receive_file(self,arg):
with open("path/to/save/filename", "wb") as handle:
handle.write(arg.data)
return True
Client side:
with open("path/to/filename", "rb") as handle:
binary_data = xmlrpclib.Binary(handle.read())
client.server_receive_file(binary_data)
This worked for me.
You want to look into the xmlrpclib Binary object. With this class you can encode and decode to/from a base64 string.
Here is how you do it:
#!/usr/bin/env python3.7
# rpc_server.py
# Fix missing module issue: ModuleNotFoundError: No module named 'SimpleXMLRPCServer'
#from SimpleXMLRPCServer import SimpleXMLRPCServer
from xmlrpc.server import SimpleXMLRPCServer
import os
# Put in your server IP here
IP='10.198.16.73'
PORT=64001
server = SimpleXMLRPCServer((IP, PORT))
def server_receive_file(arg, filename):
curDir = os.path.dirname(os.path.realpath(__file__))
output_file_path = curDir + '/' + filename
print('output_file_path -> ({})'.format(output_file_path))
with open(output_file_path, "wb") as handle:
handle.write(arg.data)
print('Output file: {}'.format(output_file_path))
return True
server.register_function(server_receive_file, 'server_receive_file')
print('Control-c to quit')
server.serve_forever()
### rpc_client.py
#!/usr/bin/env python3.7
import os
# client.py
import sys
# The answer is that the module xmlrpc is part of python3
import xmlrpc.client
#Put your server IP here
IP='10.198.16.73'
PORT=64001
url = 'http://{}:{}'.format(IP, PORT)
###server_proxy = xmlrpclib.Server(url)
client_server_proxy = xmlrpc.client.ServerProxy(url)
curDir = os.path.dirname(os.path.realpath(__file__))
filename = sys.argv[1]
fpn = curDir + '/' + filename
print(' filename -> ({})'.format(filename))
print(' fpn -> ({})'.format(fpn))
if not os.path.exists(fpn):
print('Missing file -> ({})'.format(fpn))
sys.exit(1)
with open(fpn, "rb") as handle:
binary_data = xmlrpc.client.Binary(handle.read())
client_server_proxy.server_receive_file(binary_data, filename)

Python: saving large web page to file

Let me start off by saying, I'm not new to programming but am very new to python.
I've written a program using urllib2 that requests a web page that I would then like to save to a file. The web page is about 300KB, which doesn't strike me as particularly large but seems to be enough to give me trouble, so I'm calling it 'large'.
I'm using a simple call to copy directly from the object returned from urlopen into the file:
file.write(webpage.read())
but it will just sit for minutes, trying to write into the file and I eventually receive the following:
Traceback (most recent call last):
File "program.py", line 51, in <module>
main()
File "program.py", line 43, in main
f.write(webpage.read())
File "/usr/lib/python2.7/socket.py", line 351, in read
data = self._sock.recv(rbufsize)
File "/usr/lib/python2.7/httplib.py", line 541, in read
return self._read_chunked(amt)
File "/usr/lib/python2.7/httplib.py", line 592, in _read_chunked
value.append(self._safe_read(amt))
File "/usr/lib/python2.7/httplib.py", line 649, in _safe_read
raise IncompleteRead(''.join(s), amt)
httplib.IncompleteRead: IncompleteRead(6384 bytes read, 1808 more expected)
I don't know why this should give the program so much grief?
EDIT |
here is how I'm retrieving the page
jar = cookielib.CookieJar()
cookie_processor = urllib2.HTTPCookieProcessor(jar);
opener = urllib2.build_opener(cookie_processor)
urllib2.install_opener(opener)
requ_login = urllib2.Request(LOGIN_PAGE,
data = urllib.urlencode( { 'destination' : "", 'username' : USERNAME, 'password' : PASSWORD } ))
requ_page = urllib2.Request(WEBPAGE)
try:
#login
urllib2.urlopen(requ_login)
#get desired page
portfolio = urllib2.urlopen(requ_page)
except urllib2.URLError as e:
print e.code, ": ", e.reason
I'd use a handy fileobject copier function provided by shutil module. It worked on my machine :)
>>> import urllib2
>>> import shutil
>>> remote_fo = urllib2.urlopen('http://docs.python.org/library/shutil.html')
>>> with open('bigfile', 'wb') as local_fo:
... shutil.copyfileobj(remote_fo, local_fo)
...
>>>
UPDATE: You may want to pass the 3rd argument to copyfileobj that controls the size of internal buffer used to transfer bytes.
UPDATE2: There's nothing fancy about shutil.copyfileobj. It simply reads a chunk of bytes from source file object and writes it the destination file object repeatedly until there's nothing more to read. Here's the actual source code of it that I grabbed from inside Python standard library:
def copyfileobj(fsrc, fdst, length=16*1024):
"""copy data from file-like object fsrc to file-like object fdst"""
while 1:
buf = fsrc.read(length)
if not buf:
break
fdst.write(buf)

Categories