I have a download button that should return a user's CSV. The code for it is below:
class StartCSVHandler(ThreeScaleResourceHandler):
""" Starts generating a user's CSV file """
allowed_methods = ('new',)
requires_input = False
def post(self, *args, **kwargs):
user = self.current_user()
if not user:
self.abort(401, detail=ERROR_NOT_AUTHORIZED)
task_url = '/jobs/users/%s/data/csv' % user.key()
taskqueue.add(url=task_url, queue_name='users')
return {}
def generate_data_from_query(query, user, format, handler, filename, mission=None):
batch_size = 500
cursor = None
spottings = []
count = 0
mime_type = 'text/csv' if format == CSV_FORMAT else 'text/xml'
timestamp = '%0.6f' % time.time()
gcs_filename_template = '/{bucket}/{user}/{timestamp}/{filename}'
from global_config import config
gcs_filename = gcs_filename_template.format(
bucket='project-noah-backups',
# bucket=config['cloudstorage']['export_files_bucket'],
user=str(user.key()),
timestamp=timestamp,
filename=filename
)
logging.debug(str(user.key()))
f = cloudstorage.open(gcs_filename, mode='w', content_type=mime_type)
# blobstore_file_name = files.blobstore.create(mime_type=mime_type, _blobinfo_uploaded_filename=filename.encode('utf-8'))
while True:
if format == CSV_FORMAT:
writer = utils.UnicodeWriter(f)
if count == 0:
writer.writerow(csv_display_names)
elif format == KML_FORMAT and count == 0:
f.write(template.render('spotting_export_pre.kml', {}))
if cursor:
query.with_cursor(cursor)
spottings = query.fetch(batch_size)
if format == CSV_FORMAT:
dicts = [s.as_dict() for s in spottings]
logging.debug(dicts)
for spotting_dict in dicts:
writer.writerow([spotting_dict[k] for k in csv_keys])
elif format == KML_FORMAT:
output = template.render('spotting_export_mid.kml', {'spottings' : spottings, 'server_url' : utils.server_url(handler.request)})
f.write(output.encode('utf-8'))
cursor = query.cursor()
logging.info('written spottings %d to %d' % (count, count + len(spottings)))
count += len(spottings)
if not mission:
push_to_beacon_user(user, {'format':format,'progress':count})
else:
push_to_beacon_user_mission(user, mission, {'format':format,'progress':count})
if len(spottings) < batch_size:
break
if format == KML_FORMAT:
f.write(template.render('spotting_export_post.kml', {}))
blob_key = BlobKey(blobstore.create_gs_key(u'/gs' + gcs_filename))
logging.debug(blob_key)
return blob_key
def generate_data_from_user_spottings(user, format, handler):
filename = u'My-Spottings.%s' % format
# query = user.mySpottings
query = user.mySpottings
logging.debug(query)
return generate_data_from_query(query, user, format, handler, filename)
class GenerateUserDataHandler(NoahHandler):
def post(self, user_key=None, format=None):
if not user_key:
return
user = NoahUser.get(user_key)
if not user:
return
if format not in (CSV_FORMAT, KML_FORMAT):
return
blob_key = generate_data_from_user_spottings(user, format, self)
user = NoahUser.get(user_key)
if format == CSV_FORMAT:
if user.csv:
user.csv.delete()
user.csv = blob_key
user.put()
elif format == KML_FORMAT:
if user.kml:
user.kml.delete()
user.kml = blob_key
user.put()
logging.debug(user.recent_spottings)
logging.debug(str(blob_key))
push_to_beacon_user(user, {'format': format,'url':'/data?key=%s' % str(blob_key)})
class ThreeScaleResourceHandler(ResourceHandler):
@three_scale_authenticate
def get(self, *args, **kwargs):
super(ThreeScaleResourceHandler, self).get(*args, **kwargs)
@three_scale_authenticate
def post(self, *args, **kwargs):
super(ThreeScaleResourceHandler, self).post(*args, **kwargs)
@three_scale_authenticate
def put(self, *args, **kwargs):
super(ThreeScaleResourceHandler, self).put(*args, **kwargs)
@three_scale_authenticate
def delete(self, *args, **kwargs):
super(ThreeScaleResourceHandler, self).delete(*args, **kwargs)
This should download the user's data in the form of a CSV. The problem I am getting is twofold. Firstly, the endpoint this generates is '/api/v1/users/me/data/csv', and when visiting it I receive the following error:
{"error": {"title": "Unauthorized", "status": 401, "message": "You are not authorized to perform that action. Please use the api_key parameter with your registered key."}}
Secondly, the link it provides for the user to save cannot be found:
http://localhost:8080/data?key=encoded_gs_file:cHJvamVjdC1ub2FoLWJhY2t1cHMvYWdoa1pYWi1UbTl1WlhJVkN4SUlUbTloYUZWelpYSVlnSUNBZ0lDQWdBb00vMTU4MTAxODk3My4wODgyODEvTXktU3BvdHRpbmdzLmNzdg==
I am not entirely sure what I need to correct.
Firstly, the endpoint that this generates is '/api/v1/users/me/data/csv' and when visiting it, I receive the following error: {"error": {"title": "Unauthorized", "status": 401, "message": "You are not authorized to perform that action. Please use the api_key parameter with your registered key."}}
Which handler in your code snippet handles /api/v1/users/me/data/csv? Is it StartCSVHandler? Are you sure it isn't being thrown because of this line? self.abort(401, detail=ERROR_NOT_AUTHORIZED)
Secondly, the link it provides for the user to save cannot be found: http://localhost:8080/data?key=encoded_gs_file:cHJvamVjdC1ub2FoLWJhY2t1cHMvYWdoa1pYWi1UbTl1WlhJVkN4SUlUbTloYUZWelpYSVlnSUNBZ0lDQWdBb00vMTU4MTAxODk3My4wODgyODEvTXktU3BvdHRpbmdzLmNzdg==
In what way? Like, are you trying to find the file on your machine, or is this link throwing a 404?
On your localhost, encoded_gs_file files can be found here: http://localhost:8000/blobstore
If it's a 404, then what does your handler for /data do? It doesn't look like it's in your code snippet.
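For reference, on App Engine a /data handler is usually built on BlobstoreDownloadHandler so it can stream the blob back to the browser. A minimal sketch of what such a handler could look like, assuming webapp2; the class name and route registration here are illustrative assumptions, not taken from your code:

from google.appengine.ext.webapp import blobstore_handlers
import webapp2

class DownloadDataHandler(blobstore_handlers.BlobstoreDownloadHandler):
    def get(self):
        key = self.request.get('key')
        if not key:
            self.error(404)
            return
        # send_blob works with regular blobstore keys as well as the
        # encoded_gs_file keys produced by blobstore.create_gs_key
        self.send_blob(key)

app = webapp2.WSGIApplication([('/data', DownloadDataHandler)], debug=True)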
Related
As far as I can tell, the assert calls for id and goal_id, AND my code provides them both...
goal_routes.py
from datetime import datetime
from typing import OrderedDict
from urllib.request import OpenerDirector
from flask import Blueprint, jsonify, request, make_response, abort
from app import db
from app.models.goal import Goal
from app.models.task import Task
from app.task_routes import validate_task
Create a Goal:
goal_bp = Blueprint("goal_bp", __name__, url_prefix="/goals")
#goal_bp.route("", methods = ["POST"]) def create_goals():
request_body = request.get_json()
if "title" in request_body:
new_goal = Goal(
title = request_body["title"]
)
else:
return jsonify({"details":"Invalid data"}), 400
db.session.add(new_goal)
db.session.commit()
goal_response = {"goal": new_goal.to_dictionary()}
return (jsonify(goal_response), 201)
Get Goals:
@goal_bp.route("", methods = ["GET"])
def get_goals():
sort = request.args.get("sort")
# Sort ascending (the default)
if sort == "asc":
goals =Goal.query.order_by(Goal.title)
# Sort descending
elif sort == "desc":
goals =Goal.query.order_by(Goal.title.desc())
# No sort
else:
goals = Goal.query.all()
goals_response = []
for goal in goals:
goals_response.append(goal.to_dictionary())
# If there are no saved goals, this will still return 200
return (jsonify(goals_response), 200)
Get One Goal: One Saved Goal
@goal_bp.route("/<goal_id>", methods=["GET"])
def get_one_goal(goal_id):
goal = validate_goal(goal_id)
goal_response = {"goal": goal.to_dictionary()}
return (jsonify(goal_response), 200)
Update Goal:
@goal_bp.route("/<goal_id>", methods=["PUT"])
def update_goal(goal_id):
goal = validate_goal(goal_id)
request_body = request.get_json()
goal.title = request_body["title"]
db.session.commit()
goal_response = {"goal": goal.to_dictionary()}
return (jsonify(goal_response), 200)
Goal Complete:
@goal_bp.route("/<goal_id>/mark_complete", methods=["PATCH"])
def goal_complete(goal_id):
goal = validate_goal(goal_id)
goal.completed_at = datetime.utcnow()
db.session.commit()
goal_response = {"goal": goal.to_dictionary()}
return (jsonify(goal_response), 200)
Goal Incomplete:
@goal_bp.route("/<goal_id>/mark_incomplete", methods=["PATCH"])
def goal_incomplete(goal_id):
goal = validate_goal(goal_id)
goal.completed_at = None
db.session.commit()
goal_response = {"goal": goal.to_dictionary()}
return (jsonify(goal_response), 200)
Delete Goal: Deleting a Goal
@goal_bp.route("/<goal_id>", methods=["DELETE"])
def delete_goal(goal_id):
goal = validate_goal(goal_id)
db.session.delete(goal)
db.session.commit()
response = {"details": f"Goal {goal.goal_id} \"{goal.title}\" successfully deleted"}
return (jsonify(response), 200)
Validate the goal (used by Get, Update, and Delete):
def validate_goal(goal_id):
try:
goal_id = int(goal_id)
except:
abort(make_response({"message": f"Goal {goal_id} is invalid"}, 400))
goal = Goal.query.get(goal_id)
if not goal:
abort(make_response({"message": f"Goal {goal_id} not found"}, 404))
return goal
#goal_bp.route("/<goal_id>/tasks", methods=["POST"]) def
post_task_ids_to_goal(goal_id):
goal = validate_goal(goal_id)
request_body = request.get_json()
for task_id in request_body["task_ids"]:
task = Task.query.get(task_id)
task.goal_id = goal_id
task.goal = goal
db.session.commit()
return jsonify({"id":goal.goal_id, "task_ids": request_body["task_ids"]}), 200
#goal_bp.route("/<goal_id>/tasks", methods=["GET"]) def
get_tasks_for_goal(goal_id):
goal = validate_goal(goal_id)
task_list = [task.to_dictionary() for task in goal.tasks]
goal_dict = goal.to_dictionary()
goal_dict["tasks"] = task_list
return jsonify(goal_dict)
goal.py
from app import db
class Goal(db.Model):
goal_id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String, nullable=False)
tasks = db.relationship("Task", back_populates="goals", lazy = True)
def to_dictionary(self):
goal_dict = {
"id": self.goal_id,
"title": self.title
}
if self.tasks:
goal_dict["tasks"] = [task.task_id for task in self.tasks]
return goal_dict
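Note that back_populates on Goal.tasks has to name the attribute defined on the Task model (and vice versa), and the routes above assign both task.goal_id and task.goal. The question does not include task.py, so the following is only a sketch of what the other side of the relationship is assumed to look like:

from app import db

class Task(db.Model):
    task_id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String, nullable=False)
    completed_at = db.Column(db.DateTime, nullable=True)
    # foreign key used when post_task_ids_to_goal sets task.goal_id
    goal_id = db.Column(db.Integer, db.ForeignKey("goal.goal_id"), nullable=True)
    # this attribute name must match back_populates on Goal.tasks,
    # and its own back_populates must name Goal's "tasks" attribute
    goal = db.relationship("Goal", back_populates="tasks")

    def to_dictionary(self):
        return {"id": self.task_id, "title": self.title}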
Background:
I built an app in which, as soon as a user browses to a file and clicks the "upload" button, a behind-the-scenes calculation takes place, which can take many minutes, and at the end the output is uploaded to GCP. I want the calculation (after clicking) not to block the app, and in the meantime the user should be moved to another page. At the end of the process they will receive a success/failure email.
To achieve this goal I try to use multiprocessing.
I'm having a lot of trouble trying to turn it into a multi-processed app; I keep getting the following error message when I try to start a process:
_pickle.PicklingError: Can't pickle <class 'wtforms.form.Meta'>: attribute lookup Meta on wtforms.form failed
app.py -
The main function is `upload_file`:
@app.route('/')
def home():
return redirect(url_for('upload_file', page_num=1))
@app.route('/<int:page_num>', methods=['GET', 'POST'])
def upload_file(page_num=1):
form = CreateXslxForm()
traffic_data = get_page_of_traffic_data(page_num)
queue = Queue()
p = Process(target=main_func, args=(queue, form))
p.start()
proc_ret = queue.get()
if proc_ret:
upload_success(*proc_ret)
else:
return render_template(constants.GeneralConstants.LANDING_PAGE, title='URL Comparison', form=form,
traffic_data=traffic_data)
def get_page_of_traffic_data(page_num):
traffic_data = urls_comparison_users_traffic.query.paginate(per_page=5, page=int(page_num), error_out=True)
return traffic_data
def upload_success(link, traffic_row, blob_name):
add_data_to_db(traffic_row)
return render_template('upload_success.html', title='Upload File', file_path=link, filename=blob_name)
def add_data_to_db(traffic_row):
# MANUAL PRE PING
try:
db.session.execute("SELECT 1;")
db.session.commit()
except:
db.session.rollback()
finally:
db.session.close()
# SESSION COMMIT, ROLLBACK, CLOSE
try:
db.session.add(traffic_row)
db.session.commit()
send_response_mail_to_user(traffic_row)
except Exception as e:
db.session.rollback()
raise e
finally:
db.session.close()
def send_response_mail_to_user(traffic_data):
sendgrid_obj = sendgrid_handler.SendgridHandler(sendgrid_key=SENDGRID_KEY)
sendgrid_obj.mails_to_send = URLComparisonEmailBuilder.build_mails(traffic_data)
sendgrid_obj.send()
if __name__ == "__main__":
app.run(port=5000, debug=True, threaded=True)
main.py
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = r'gcp-cre.json'
storage_client = storage.Client()
def get_file_from_form(form):
# form = forms.CreateXslxForm()
timestr = time.strftime(constants.DatesFormats.DATETIME)
custom_name = f"{constants.GeneralConstants.DEST_DEFAULT_NAME}-{timestr}"
# traffic_data = get_page_of_traffic_data(page_num)
if form.validate_on_submit():
load_filename = secure_filename(form.input_file.data.filename)
valid_file_name = get_valid_filename(request.form["filename"])
if valid_file_name:
custom_name = f"{valid_file_name}"
url_comp_obj = URLCompare()
result = url_comp_obj.compare_all_urls(form.input_file.data)
row_data = {
"upload_file": form.input_file.data.filename,
"output_file": custom_name,
"gcp_link": None,
"user": 'nki-tov'
}
return custom_name, result, constants.GeneralConstants.BUCKET_NAME, row_data
def get_valid_filename(s):
s = s.strip().replace(' ', '_')
return re.sub(r'(?u)[^-\w]', '', s)
def upload_to_bucket(blob_name, file, bucket_name, row_data):
'''
Upload file to a bucket
: blob_name (str) - object name
: file_path (str)
: bucket_name (str)
'''
bucket = storage_client.get_bucket(bucket_name)
blob = bucket.blob(blob_name)
blob.upload_from_file(file)
link = f'https://console.cloud.google.com/storage/browser/_details/{bucket_name}/{blob_name};tab=live_object?authuser=0'
row_data["gcp_link"] = link
traffic_row = urls_comparison_users_traffic(**row_data)
return link, traffic_row, blob_name
def main_func(queue: Queue, form):
res = get_file_from_form(form)
if res:
queue.put(upload_to_bucket(*res))
else:
queue.put(res) # None
What am I doing wrong?
Is it something to do with the architecture?
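One note on the error itself: multiprocessing pickles every argument it hands to Process, and a bound wtforms form (its Meta class, the underlying request and the open file stream) is not picklable. The usual workaround is to pull plain, picklable values out of the form in the parent process and pass only those to the child. A rough sketch under that assumption (main_func here would be your existing function, adapted to accept these primitives instead of the form object):

from multiprocessing import Process, Queue

def start_background_job(form, requested_filename):
    # Read everything needed from the form while still in the parent process;
    # bytes and strings pickle cleanly, the form object does not.
    file_bytes = form.input_file.data.read()
    upload_name = form.input_file.data.filename

    queue = Queue()
    p = Process(target=main_func, args=(queue, file_bytes, upload_name, requested_filename))
    p.start()
    return p, queue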
I am using a class-based service in Python and I get an error whenever I try to use it. I am unable to figure out the reason.
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os

from xml.dom import minidom
from pysimplesoap.client import SoapClient
from pysimplesoap.helpers import sort_dict
MEDIA_ROOT = '/User/sunand/documents/resumes/'
parser = ResumeParser()
names = parser.get_names(MEDIA_ROOT)
print names
class ParserClient(SoapClient):
""" Extends the soap client to encode the response with utf-8 encoding.
"""
def wsdl_call(
self,
method,
*args,
**kwargs
):
""" Override wsdl_call method to make sure unmarshall is not called.
"""
operation = self.get_operation(method)
# get i/o type declarations:
inp = operation['input']
header = operation.get('header')
if 'action' in operation:
self.action = operation['action']
# construct header and parameters
if header:
self.__call_headers = sort_dict(header, self.__headers)
(method, params) = self.wsdl_call_get_params(method, inp,
*args, **kwargs)
response = self.call(method, *params)
return response
def send(self, method, xml):
""" Overrides the send method to get the actual xml content.
"""
content = super(ParserClient, self).send(method, xml)
self.result = content
return content
class ResumeParser(object):
""" Connects to the Resume Parser's XML api to get parsed data.
"""
def __init__(self, simple=True, timeout=60):
""" Initializes the ResumeParser class.
"""
self.wsdl = \
'http://jobsite.onlineresumeparser.com/rPlusParseResume.asmx?WSDL'
self.secret = 'my-secret-key' # Enter key here
self.encoding = 'base64'
self.simple = simple
self.client = ParserClient(wsdl=self.wsdl, timeout=timeout)
self.names = []
def get_file_content(self, file_path):
""" Return the encoded content for the given file.
"""
file_obj = open(os.path.abspath(file_path), 'r')
content = file_obj.read().encode(self.encoding)
file_obj.close()
return content
def get_names(self, path):
"""
Given a path to a folder that contains resume files this method
will parse the resumes and will return the names of the candidates
as a list.
"""
opt = os.path
resumes = [opt.join(path, r) for r in os.listdir(path)
if opt.isfile(opt.join(path, r))]
# Parse information for each resume.
for resume in resumes:
try:
xml_data = self.get_xml(resume)
name = self.get_name_from_xml(xml_data)
if name:
self.names.append(name)
except Exception, err:
# print name
print 'Error parsing resume: %s' % str(err)
return list(set(self.names))
def get_name_from_xml(self, data):
""" Returns the full name from the xml data given.
"""
xmldata = minidom.parseString(data)
name = xmldata.getElementsByTagName('CANDIDATE_FULL_NAME')
name = name[0].childNodes[0].data.title()
return name
def get_xml(self, filepath):
""" Fetches and returns the xml for the given file from the api.
"""
filename = os.path.basename(filepath)
extension = os.path.splitext(filepath)[1]
base64 = self.get_file_content(filepath)
filedata = {
'B64FileZippedContent': base64,
'FileName': filename,
'InputType': extension,
'UserID': 1,
'secretKey': self.secret,
}
get = \
(self.client.GetSimpleXML if self.simple else self.client.getHRXML)
get(**filedata)
return self.process_raw_xml()
def process_raw_xml(self, data=None):
""" Processes and returns the clean XML.
"""
raw = (data if data else self.client.result)
parsed = minidom.parseString(raw)
result = parsed.getElementsByTagName('GetSimpleXMLResult')[0]
text_node = result.childNodes[0]
data = text_node.data.encode('UTF-8')
return data
Upon running the code I am getting an error
TypeError: wsdl_call_get_params() got an unexpected keyword argument 'secretKey'
What am I doing wrong?
It looks like you are incorrectly overriding wsdl_call.
Firstly, we can see that SoapClient (which you extend in ParserClient) has a __getattr__ function that fetches pseudo-attributes of the SoapClient.
def __getattr__(self, attr):
"Return a pseudo-method that can be called"
if not self.services: # not using WSDL?
return lambda self=self, *args, **kwargs: self.call(attr,*args,**kwargs)
else: # using WSDL:
return lambda *args, **kwargs: self.wsdl_call(attr,*args,**kwargs)
You can see that this function is using wsdl_call to help it map functions to unknown attributes.
The specific pseudo-method that is causing the problem is in your code (or appears to be):
filedata = {
'B64FileZippedContent': base64,
'FileName': filename,
'InputType': extension,
'UserID': 1,
'secretKey': self.secret, # <-- the secretKey key word argument
}
get = \
(self.client.GetSimpleXML if self.simple else self.client.getHRXML)
get(**filedata)
# here client is an instance of your `ParserClient` (and `SoapClient`).
The bit above took me a while to track down. With a full stack trace I would have found it much quicker. Please always post stack traces (when there is one) in future when asking for help.
How to solve this
Option 1: Provide a concrete implementation of GetSimpleXML and getHRXML. This will solve the immediate problem, but not the larger problem.
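For the first option, the idea is to define those operation names as real methods on ParserClient, so that __getattr__ (and therefore your overridden wsdl_call) is never consulted for them. A rough sketch, reusing the plain self.call path that the __getattr__ snippet above already uses for the non-WSDL case:

class ParserClient(SoapClient):
    # ... existing send / wsdl_call overrides ...

    def GetSimpleXML(self, **kwargs):
        # A real attribute means __getattr__ is never triggered for this name.
        return self.call('GetSimpleXML', **kwargs)

    def getHRXML(self, **kwargs):
        return self.call('getHRXML', **kwargs)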
Option 2: Rewrite wsdl_call.
The rewritten section of code should check the value of the method argument and either do what you want, or delegate to the SoapClient implementation.
eg.
def wsdl_call(self, method, *args, **kwargs):
if method == "some_method":
return self._my_wsdl_call(method, *args, **kwargs)
else:
return super(ParserClient, self).wsdl_call(method, *args, **kwargs)
def _my_wsdl_call(self, method, *args, **kwargs):
...
How do I configure jinja2 on App Engine to:
Auto-reload when a template is updated.
Enable the bytecode cache so it can be shared among instances. I would prefer jinja2 to produce the bytecode when compiling a template and store it in the datastore, so the next instance will load the bytecode instead of repeatedly compiling the template.
I have added the bcc like this, using the App Engine memcache Client():
loader = dynloaders.DynLoader() # init Function loader
bcc = MemcachedBytecodeCache(memcache.Client(), prefix='jinja2/bytecode/', timeout=None)
return Environment(auto_reload=True, cache_size=100, loader=FunctionLoader(loader.load_dyn_all),
bytecode_cache=bcc)
My function loader:
def html(self, cid):
def _html_txt_up_to_date(): # closure to check if template is up to date
return CMSUpdates.check_no_update(cid, template.modified)
template = ndb.Key('Templates', cid, parent=self.parent_key).get()
if not template:
logging.error('DynLoader (HTML/TXT): %s' % cid)
return None # raises TemplateNotFound exception
return template.content, None, _html_txt_up_to_date
The template model's modified field is an ndb.DateTimeProperty(auto_now=True).
The closure function:
class CMSUpdates(ndb.Model):
updates = ndb.JsonProperty()
@classmethod
def check_no_update(cls, cid, cid_modified):
cms_updates = cls.get_or_insert('cms_updates', updates=dict()).updates
if cid in cms_updates: # cid modified has dt microseconds
if cid_modified >= datetime.strptime(cms_updates[cid], '%Y-%m-%d %H:%M:%S'):
if (datetime.now() - timedelta(days=1)) > cid_modified:
del cms_updates[cid]
cls(id='cms_updates', updates=cms_updates).put_async()
return True
return False # reload the template
return True
I had been looking for a solution for a few weeks. I finally figured it out, and I would like to share my code with everyone. There are 4 Python source files in my code.
TemplateEngine.py, ContentRenderer.py, TestContent.py & Update_Template.py
File: TemplateEngine.py
Note:
I use now = datetime.utcnow() + timedelta(hours=8) because my timezone is GMT+8.
You must use ndb.BlobProperty to store the bytecode; ndb.TextProperty will not work!
import logging
import jinja2
from google.appengine.ext import ndb
from datetime import datetime, timedelta
class SiteTemplates(ndb.Model):
name = ndb.StringProperty(indexed=True, required=True)
data = ndb.TextProperty()
uptodate = ndb.BooleanProperty(required=True)
class SiteTemplateBytecodes(ndb.Model):
key = ndb.StringProperty(indexed=True, required=True)
data = ndb.BlobProperty(required=True)
mod_datetime = ndb.DateTimeProperty(required=True)
class LocalCache(jinja2.BytecodeCache):
def load_bytecode(self, bucket):
q = SiteTemplateBytecodes.query(SiteTemplateBytecodes.key == bucket.key)
if q.count() > 0:
r = q.get()
bucket.bytecode_from_string(r.data)
def dump_bytecode(self, bucket):
now = datetime.utcnow() + timedelta(hours=8)
q = SiteTemplateBytecodes.query(SiteTemplateBytecodes.key == bucket.key)
if q.count() > 0:
r = q.get()
r.data = bucket.bytecode_to_string()
r.mod_datetime = now
else:
r = SiteTemplateBytecodes(key=bucket.key, data=bucket.bytecode_to_string(), mod_datetime=now)
r.put()
def Update_Template_Source(tn, source):
try:
q = SiteTemplates.query(SiteTemplates.name == tn)
if q.count() == 0:
u = SiteTemplates(name=tn, data=source, uptodate=False)
else:
u = q.get()
u.name=tn
u.data=source
u.uptodate=False
u.put()
return True
except Exception,e:
logging.exception(e)
return False
def Get_Template_Source(tn):
uptodate = False
def Template_Uptodate():
return uptodate
try:
q = SiteTemplates.query(SiteTemplates.name == tn)
if q.count() > 0:
r = q.get()
uptodate = r.uptodate
if r.uptodate == False:
r.uptodate=True
r.put()
return r.data, tn, Template_Uptodate
else:
return None
except Exception,e:
logging.exception(e)
return None
File: ContentRenderer.py
Note: It is very important to set cache_size=0, otherwise the bytecode cache function will be disabled. I have no idea why.
from TemplateEngine import Get_Template_Source, LocalCache
import jinja2

def Render(tn, tags):
    global te
    return te.get_template(tn).render(tags)

bcc = LocalCache()
te = jinja2.Environment(loader=jinja2.FunctionLoader(Get_Template_Source), cache_size=0, extensions=['jinja2.ext.autoescape'], bytecode_cache=bcc)
File: Update_Template.py
Note: Use Update_Template_Source() to update the template source in the datastore.
from TemplateEngine import Update_Template_Source
template_source = '<html><body>hello world to {{title}}!</body></html>'
if Update_Template_Source('my-template.html', template_source):
print 'template is updated'
else:
print 'error when updating template source'
File: TestContent.py
Note: Do some tests.
from ContentRenderer import Render
print Render('my-template.html', {'title':'human'})
'hello world to human!'
You will find that even if you have more than 20 instances in your application, the latency will not increase even when you update your template, and the template source will be updated within 5 to 10 seconds.
I was wondering if it is possible to create an upload function to upload a picture through my own site to the Gravatar site?
Yes, this is possible. See http://en.gravatar.com/site/implement/xmlrpc/, specifically the grav.saveData or grav.SaveUrl calls.
Yes it's possible!
import base64
from xmlrpclib import ServerProxy, Fault
from hashlib import md5
class GravatarXMLRPC(object):
API_URI = 'https://secure.gravatar.com/xmlrpc?user={0}'
def __init__(self, request, password=''):
self.request = request
self.password = password
self.email = sanitize_email(request.user.email)
self.email_hash = md5_hash(self.email)
self._server = ServerProxy(
self.API_URI.format(self.email_hash))
def saveData(self, image):
""" Save binary image data as a userimage for this account """
params = { 'data': base64_encode(image.read()), 'rating': 0, }
return self._call('saveData', params)
#return self.useUserimage(image)
def _call(self, method, params={}):
""" Call a method from the API, gets 'grav.' prepended to it. """
args = { 'password': self.password, }
args.update(params)
try:
return getattr(self._server, 'grav.' + method, None)(args)
except Fault as error:
error_msg = "Server error: {1} (error code: {0})"
print error_msg.format(error.faultCode, error.faultString)
def base64_encode(obj):
return base64.b64encode(obj)
def sanitize_email(email):
return email.lower().strip()
def md5_hash(string):
return md5(string.encode('utf-8')).hexdigest()
Just call the class in your view :)
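For example, in a (hypothetical) Django-style view the call could look like this, assuming the uploaded image arrives as request.FILES['avatar'] and the user's Gravatar password is available:

def upload_avatar(request):
    gravatar = GravatarXMLRPC(request, password='your-gravatar-password')
    # saveData() base64-encodes the file contents and sends them to grav.saveData
    return gravatar.saveData(request.FILES['avatar'])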