I'm using an open source web service Python application to send email through GAE, but if the name or email body contains Arabic or Hebrew characters the application throws errors (e.g. "The indicated parameters are not valid"), so I need to know how to fix this issue. I should note that I'm a Python beginner (one week since I started playing with Python).
import cgi
import os
import logging
import contextlib
from xml.dom import minidom
from xml.dom.minidom import Document
import exceptions
import warnings
import imghdr
from google.appengine.api import images
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext.webapp import template
from google.appengine.api import mail
import wsgiref.handlers
# START Constants
CONTENT_TYPE_HEADER = "Content-Type"
CONTENT_TYPE_TEXT = "text/plain"
XML_CONTENT_TYPE = "application/xml"
XML_ENCODING = "utf-8"
"""
Allows you to specify IP addresses and associated "api_key"s to prevent others from using your app.
Storage and Manipulation methods will check for this "api_key" in the POST/GET params.
Retrieval methods don't use it (however you could enable them to use it, but maybe rewrite so you have a "read" key and a "write" key to prevent others from manipulating your data).
Set "AUTH = False" to disable (allowing anyone use your app and CRUD your data).
To generate a hash/api_key visit https://www.grc.com/passwords.htm
To find your ip visit http://www.whatsmyip.org/
"""
AUTH = False
# END Constants
# START Exception Handling
class Error(StandardError):
pass
class Forbidden(Error):
pass
logging.getLogger().setLevel(logging.DEBUG)
@contextlib.contextmanager
def mailExcpHandler(ctx):
try:
yield {}
except (ValueError), exc:
xml_error_response(ctx, 400 ,'app.invalid_parameters', 'The indicated parameters are not valid: ' + exc.message)
except (Forbidden), exc:
xml_error_response(ctx, 403 ,'app.forbidden', 'You don\'t have permission to perform this action: ' + exc.message)
except (Exception), exc:
xml_error_response(ctx, 500 ,'system.other', 'An unexpected error in the web service has happened: ' + exc.message)
def xml_error_response(ctx, status, error_id, error_msg):
ctx.error(status)
doc = Document()
errorcard = doc.createElement("error")
errorcard.setAttribute("id", error_id)
doc.appendChild(errorcard)
ptext = doc.createTextNode(error_msg)
errorcard.appendChild(ptext)
ctx.response.headers[CONTENT_TYPE_HEADER] = XML_CONTENT_TYPE
ctx.response.out.write(doc.toxml(XML_ENCODING))
# END Exception Handling
# START Helper Methods
def isAuth(ip = None, key = None):
if AUTH == False:
return True
elif AUTH.has_key(ip) and key == AUTH[ip]:
return True
else:
return False
# END Helper Methods
# START Request Handlers
class Send(webapp.RequestHandler):
def post(self):
"""
Sends an email based on POST params. It will queue if resources are unavailable at the time.
Returns "Success"
POST Args:
to: the recipient address
from: the sender address (must be a registered GAE email)
subject: email subject
body: email body content
"""
with mailExcpHandler(self):
# check authorised
if isAuth(self.request.remote_addr,self.request.POST.get('api_key')) == False:
raise Forbidden("Invalid Credentials")
# read data from request
mail_to = str(self.request.POST.get('to'))
mail_from = str(self.request.POST.get('from'))
mail_subject = str(self.request.POST.get('subject'))
mail_plain = str(self.request.POST.get('plain'))
mail_html = str(self.request.POST.get('html'))
message = mail.EmailMessage()
message.sender = mail_from
message.to = mail_to
message.subject = mail_subject
message.body = mail_plain
if mail_html != None and mail_html != "":
message.html = mail_html
message.send()
self.response.headers[CONTENT_TYPE_HEADER] = CONTENT_TYPE_TEXT
self.response.out.write("Success")
# END Request Handlers
# START Application
application = webapp.WSGIApplication([
('/send', Send)
],debug=True)
def main():
run_wsgi_app(application)
if __name__ == '__main__':
main()
# END Application
mail_to = str(self.request.POST.get('to'))
mail_from = str(self.request.POST.get('from'))
mail_subject = str(self.request.POST.get('subject'))
mail_plain = str(self.request.POST.get('plain'))
mail_html = str(self.request.POST.get('html'))
I doubt you need to convert them to strings. The POST parameters are already unicode, and calling str() on text that contains Arabic or Hebrew characters raises a UnicodeEncodeError (a subclass of ValueError, which is why you see "The indicated parameters are not valid"). Try it without str(); it should work.
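For reference, a minimal sketch of the relevant part of the Send handler with the str() calls removed; the values are passed through as the unicode objects the webapp request already gives you, and everything else stays as in the code above:

# read data from request -- no str(), so Arabic/Hebrew text survives
mail_to = self.request.POST.get('to')
mail_from = self.request.POST.get('from')
mail_subject = self.request.POST.get('subject')
mail_plain = self.request.POST.get('plain')
mail_html = self.request.POST.get('html')
message = mail.EmailMessage()
message.sender = mail_from
message.to = mail_to
message.subject = mail_subject
message.body = mail_plain
if mail_html:  # only set html when a non-empty value was posted
    message.html = mail_html
message.send()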
Related
I am setting up a Flask server that will act as a webhook for the Twitter Account Activity API. However, I've run into an issue that I have no idea how to solve; I'm fairly new to programming, so please bear with me. I used this repository: https://github.com/RickRedSix/twitter-webhook-boilerplate-python/blob/master/Main.py
This is the error:
line 28, in twitterCrcValidation
key=bytes(CONSUMER_SECRET, encoding ='utf-8'),
TypeError: encoding without a string argument
Here's the code:
#!/usr/bin/env python
from flask import Flask, request, send_from_directory, make_response
from http import HTTPStatus
import Twitter, hashlib, hmac, base64, os, logging, json
from dotenv import load_dotenv
load_dotenv('.env')
CONSUMER_SECRET = os.getenv('CONSUMER_SECRET')
CURRENT_USER_ID = os.getenv('CURRENT_USER_ID')
app = Flask(__name__)
#generic index route
@app.route('/')
def default_route():
return send_from_directory('www', 'index.html')
#The GET method for webhook should be used for the CRC check
#TODO: add header validation (compare_digest https://docs.python.org/3.6/library/hmac.html)
#app.route("/webhook", methods=["GET"])
def twitterCrcValidation():
crc = request.args['crc_token']
validation = hmac.new(
key=bytes(CONSUMER_SECRET, encoding ='utf-8'),
msg=bytes(crc, encoding = 'utf-8'),
digestmod = hashlib.sha256
)
digested = base64.b64encode(validation.digest())
response = {
'response_token': 'sha256=' + format(str(digested)[2:-1])
}
print('responding to CRC call')
return json.dumps(response)
#The POST method for webhook should be used for all other API events
#TODO: add event-specific behaviours beyond Direct Message and Like
#app.route("/webhook", methods=["POST"])
def twitterEventReceived():
requestJson = request.get_json()
#dump to console for debugging purposes
print(json.dumps(requestJson, indent=4, sort_keys=True))
if 'favorite_events' in requestJson.keys():
#Tweet Favourite Event, process that
likeObject = requestJson['favorite_events'][0]
userId = likeObject.get('user', {}).get('id')
#event is from myself so ignore (Favourite event fires when you send a DM too)
if userId == CURRENT_USER_ID:
return ('', HTTPStatus.OK)
Twitter.processLikeEvent(likeObject)
elif 'direct_message_events' in requestJson.keys():
#DM received, process that
eventType = requestJson['direct_message_events'][0].get("type")
messageObject = requestJson['direct_message_events'][0].get('message_create', {})
messageSenderId = messageObject.get('sender_id')
#event type isn't new message so ignore
if eventType != 'message_create':
return ('', HTTPStatus.OK)
#message is from myself so ignore (Message create fires when you send a DM too)
if messageSenderId == CURRENT_USER_ID:
return ('', HTTPStatus.OK)
Twitter.processDirectMessageEvent(messageObject)
else:
#Event type not supported
return ('', HTTPStatus.OK)
return ('', HTTPStatus.OK)
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 65010.
port = int(os.environ.get('PORT', 65010))
gunicorn_logger = logging.getLogger('gunicorn.error')
app.logger.handlers = gunicorn_logger.handlers
app.logger.setLevel(gunicorn_logger.level)
app.run(host='0.0.0.0', port=port, debug=True)
You need to validate input to your program, and environment variables are no exception. As a minimum, check that these variables actually exist. os.getenv returns None if the environment variable doesn't exist; None is not a string, hence your error. You could do this with a slightly different os call.
CONSUMER_SECRET = os.environ['CONSUMER_SECRET']
CURRENT_USER_ID = os.environ['CURRENT_USER_ID']
Now an exception is raised on failure. This could be wrapped in an exception handler if you want different error reporting than the standard traceback.
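For example, a minimal sketch of wrapping the lookup so a missing variable fails fast with a readable message (variable names follow the code above):

import os
import sys

try:
    CONSUMER_SECRET = os.environ['CONSUMER_SECRET']
    CURRENT_USER_ID = os.environ['CURRENT_USER_ID']
except KeyError as exc:
    # Fail fast with a clear message instead of a TypeError later on
    sys.exit('Missing required environment variable: %s' % exc)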
I'm trying to restrict incoming emails to my app so that it only accepts mail from members of a Google Group. More specifically, I only want to add the contents of the email to my datastore if the sender is part of the group. I've found the hasMember method here: https://developers.google.com/admin-sdk/directory/v1/reference/members/hasMember and think this may be what I am looking for, but I do not know how to use it, as they haven't provided an example and I'm very new to this.
Would this be the correct API to use for this? Here is my incoming mail handler code, I have added the IF statement comment to show what I would like to do:
import webapp2
import logging
from google.appengine.ext.webapp import mail_handlers
from google.appengine.api import mail
import os
from main import WorkRequest
import re
class IncomingMailHandler(mail_handlers.InboundMailHandler):
def receive(self, message):
(encoding, payload) = list(message.bodies(content_type='text/plain'))[0]
body_text = payload.decode()
logging.info('Received email message from %s, subject "%s": %s' %
(message.sender, message.subject, body_text))
logging.info (message.sender)
logging.info(message.subject)
logging.info(body_text)
#IF MESSAGE_SENDER == MEMBER OF GOOGLE GROUP:
wr = WorkRequest()
wr.email = message.sender
wr.userId = None
wr.title = message.subject
wr.content = body_text
wr.status = "OPEN"
wr.submission_type = "EMAIL"
wr.assigned_to = "UNASSIGNED"
wr.put()
application = webapp2.WSGIApplication([('/_ah/mail/.+', IncomingMailHandler)],debug=True)
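For reference, a hedged sketch of what the hasMember call might look like with the google-api-python-client Admin SDK Directory client; the group address, helper name, and credentials handling here are illustrative assumptions, not tested code:

# Hypothetical helper: check group membership via the Admin SDK Directory API.
# Assumes credentials authorized for the
# https://www.googleapis.com/auth/admin.directory.group.member.readonly scope.
from googleapiclient.discovery import build

def is_group_member(credentials, sender_address):
    directory = build('admin', 'directory_v1', credentials=credentials)
    result = directory.members().hasMember(
        groupKey='my-group@example.com',   # illustrative group address
        memberKey=sender_address).execute()
    return result.get('isMember', False)

Note that message.sender can arrive as "Display Name <user@example.com>", so you may need to reduce it to the bare address before passing it as memberKey.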
I am developing a Gmail extracting app that uses the Gmail API to fetch mail from the server. The problem is that the fetch time for mails is too long, even though I used threading in the back-end framework. Now I want to implement a feature offering a bulk download ("once your download is ready, we will mail you"), but for that I want to run download.py (shown below, from the app tree) in the background and have it terminate once the fetch is over.
At the very bottom of the code I want to mail the user that their download is ready, but it's not working even though I have defined the mail server in settings.py.
download.py
import httplib2, base64
from stripogram import html2text
from oauth2client.django_orm import Storage
from apiclient.discovery import build
from oauth2client import client
from django.contrib.auth.models import User
from .models import CredentialsModel
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import authentication, permissions
from gextracto import models
from gextracto.models import UserData
from django.core.mail import EmailMessage
from django.core import mail
connection = mail.get_connection()
class ListMails(APIView):
"""
Gets a list of a specified number mail ids for a particular label
Extracts the email in the form of plain/text
The API returns all the extracted mails
"""
authentication_classes = (authentication.SessionAuthentication,)
permission_classes = (permissions.IsAuthenticated,)
def extract_headers(self, message):
"""
Extract the headers for a single mail and returns it
{To, From, Subject}
"""
needed_fields = ('From', 'To', 'Subject')
return {i['name']:i['value'] for i in message['payload']['headers'] if i['name'] in needed_fields}
def get_message_body(self, message):
"""
Get the body of an email
Recursively look for the body for different mimetypes
Returns the body as text/plain
"""
if 'payload' in message:
return self.get_message_body(message['payload'])
elif 'parts' in message:
return self.get_message_body(message['parts'][0])
else:
data = base64.urlsafe_b64decode(message['body']['data'].encode('ASCII'))
markdown_data = html2text(data)#.decode('utf-8', "replace")
data = data.replace("\n", "<br/>")
# return {markdown, html}
return {'markdown':unicode( markdown_data,"ISO-8859-1"), 'html':unicode(data,"ISO-8859-1")} if markdown_data else {'html':unicode(data,"ISO-8859-1")}
def message_content_html(self, userId, message_id, service):
"""
Make queries to get the content for a mail given its message id
Returns all the content
"""
content = {'id':message_id}
# try
message = service.users().messages().get(userId=userId, id=message_id).execute()
mimetype = message['payload']['mimeType']
if mimetype == 'text/html':
return {}
#
else:
body = self.get_message_body(message)
if body == "":
body = "<empty message>"
headers = self.extract_headers(message)
content['body'] = body
content.update(headers)
return content
def collect_mails(self, user, messages, service):
"""
Collect the content for all the mails currently downloaded
"""
all_messages = []
try:
for message in messages:
content = self.message_content_html(user.username, message['id'], service)
if content:
all_messages.append(content)
return all_messages
# return empty list if no messages were downloaded
except KeyError:
return []
def get(self, request, format=None):
"""
Handles the GET request to get all the mails for a label
Paginates through the GAPI content if required
API returns all the messages
{To, From, Subject, body}
"""
user = request.user
storage = Storage(CredentialsModel, 'id', user, 'credential')
credentials = storage.get()
http_auth = credentials.authorize(httplib2.Http())
service = build('gmail', 'v1', http=http_auth)
user_Id = user.username
label_id = request.GET['label']
# try
# call Google API with a request to get a list of all the labels
response = service.users().messages().list(userId=user_Id, labelIds=label_id, maxResults=100).execute()
all_messages = self.collect_mails(user, response['messages'], service)
if not all_messages:
return Response([])
else:
if 'nextPageToken' in response:
page_token_flag = True
# request more more mails if the download limit has not yet been satisfied
while(page_token_flag):
response = service.users().messages().list(userId=user_Id, pageToken=response['nextPageToken'], maxResults=100).execute()
all_messages.append(self.collect_mails(user, response['messages'], service))
print(all_messages)
#for x in range(0,len(all_messages)):
#b=all_messages[10]
#instance= UserData(user_id=user ,label=label_id, sender = b['From'] , subject=b['Subject'] , body=b['body'])
#instance.save()
page_token_flag = 'nextPageToken' in response
##
for x in range(0,len(all_messages)):
b=all_messages[10]
instance= UserData(user_id=user ,label=label_id, sender = b['From'] , subject=b['Subject'] , body=b['body'])
instance.save()
print ("Hi i am here!!!")
email = EmailMessage('Your Download Ready!', 'http://127.0.0.1:8000/admin/gextracto/userdata/', to=[user], connection=connection)
email.send()
connection.close()
return Response(all_messages)
Please tell me how to run it in the background. If you need any other info, please ask. Thanks.
I don't know the exact requirements, but I'd look at Celery to run background tasks. This approach lets you manage all post-task activities in a native Django manner.
You could also think about running the script via cron (as a manage.py command), but that can lead to some limitations.
As for the failure to send the email: I believe you don't need to close the connection after sending it. Usually I use the send_mail()/send_mass_mail() functions; please check their code to get an idea.
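A rough sketch of what the Celery route could look like, assuming Celery is already configured for the project (the task and argument names are illustrative, not taken from your code):

# tasks.py -- illustrative sketch; assumes a working Celery setup for the project
from celery import shared_task
from django.core.mail import send_mail

@shared_task
def fetch_and_notify(user_email, label_id):
    # ... run the extraction logic from download.py here ...
    send_mail(
        'Your Download Ready!',
        'http://127.0.0.1:8000/admin/gextracto/userdata/',
        None,              # falls back to DEFAULT_FROM_EMAIL
        [user_email],
    )

The view then just calls fetch_and_notify.delay(user.email, label_id) and returns immediately.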
I'm having an error in a Python file. What it does is get access to a specific Google API using OAuth 2.0, but that's not the broken part. The broken part involves argparse (retrieving arguments from the console by adding --something="").
Here's my code:
import argparse
import os
import pprint
import sys
import time
import httplib2
from apiclient import discovery
from oauth2client import file
from oauth2client import tools
from oauth2client import client
# Time to wait (in seconds) between successive checks of training status.
SLEEP_TIME = 10
# Declare command-line flags.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument('object_name',
help='Full Google Storage path of csv data (ex bucket/object)')
argparser.add_argument('id',
help='Model Id of your choosing to name trained model')
#argparser.add_argument('action');
def print_header(line):
'''Format and print header block sized to length of line'''
header_str = '='
header_line = header_str * len(line)
print '\n' + header_line
print line
print header_line
def main(argv):
parent_parsers = [tools.argparser]
parent_parsers.extend(parents)
parser = argparse.ArgumentParser(
description=doc,
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=parent_parsers)
flags = parser.parse_args(argv[1:])
scope='https://www.googleapis.com/auth/prediction'
client_secrets = os.path.join(os.path.dirname(__file__),
'client_secrets.json')
flow = client.flow_from_clientsecrets(client_secrets,
scope=scope,
message=tools.message_if_missing(client_secrets))
storage = file.Storage('prediction.dat')
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = tools.run_flow(flow, storage, flags)
http = credentials.authorize(http = httplib2.Http())
service = discovery.build('prediction', 'v1.6', http=http)
try:
papi = service.trainedmodels()
print_header('Fetching list of first ten models')
result = papi.list(maxResults=10).execute()
print 'List results:'
pprint.pprint(result)
except client.AccessTokenRefreshError:
print ("The credentials have been revoked or expired, please re-run"
"the application to re-authorize")
if __name__ == '__main__':
main(sys.argv)
On this line --> parent_parsers = [tools.argparser] I'm getting this error:
line 75, in main
parent_parsers = [tools.argparser]
AttributeError: 'module' object has no attribute 'argparser'
The tools.py file is this one:
import BaseHTTPServer
import argparse
import httplib2
import logging
import os
import socket
import sys
import webbrowser
from oauth2client import client
from oauth2client import file
from oauth2client import util
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console <https://code.google.com/apis/console>.
"""
# run_parser is an ArgumentParser that contains command-line options expected
# by tools.run(). Pass it in as part of the 'parents' argument to your own
# ArgumentParser.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument('--auth_host_name', default='localhost',
help='Hostname when running a local web server.')
argparser.add_argument('--noauth_local_webserver', action='store_true',
default=False, help='Do not run a local web server.')
argparser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
nargs='*', help='Port web server should listen on.')
argparser.add_argument('--logging_level', default='ERROR',
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR',
'CRITICAL'],
help='Set the logging level of detail.')
class ClientRedirectServer(BaseHTTPServer.HTTPServer):
"""A server to handle OAuth 2.0 redirects back to localhost.
Waits for a single request and parses the query parameters
into query_params and then stops serving.
"""
query_params = {}
class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""A handler for OAuth 2.0 redirects back to localhost.
Waits for a single request and parses the query parameters
into the servers query_params and then stops serving.
"""
def do_GET(s):
"""Handle a GET request.
Parses the query parameters and prints a message
if the flow has completed. Note that we can't detect
if an error occurred.
"""
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
query = s.path.split('?', 1)[-1]
query = dict(parse_qsl(query))
s.server.query_params = query
s.wfile.write("<html><head><title>Authentication Status</title></head>")
s.wfile.write("<body><p>The authentication flow has completed.</p>")
s.wfile.write("</body></html>")
def log_message(self, format, *args):
"""Do not log messages to stdout while running as command line program."""
pass
@util.positional(3)
def run_flow(flow, storage, flags, http=None):
logging.getLogger().setLevel(getattr(logging, flags.logging_level))
if not flags.noauth_local_webserver:
success = False
port_number = 0
for port in flags.auth_host_port:
port_number = port
try:
httpd = ClientRedirectServer((flags.auth_host_name, port),
ClientRedirectHandler)
except socket.error, e:
pass
else:
success = True
break
flags.noauth_local_webserver = not success
if not success:
print 'Failed to start a local webserver listening on either port 8080'
print 'or port 9090. Please check your firewall settings and locally'
print 'running programs that may be blocking or using those ports.'
print
print 'Falling back to --noauth_local_webserver and continuing with',
print 'authorization.'
print
if not flags.noauth_local_webserver:
oauth_callback = 'http://%s:%s/' % (flags.auth_host_name, port_number)
else:
oauth_callback = client.OOB_CALLBACK_URN
flow.redirect_uri = oauth_callback
authorize_url = flow.step1_get_authorize_url()
if not flags.noauth_local_webserver:
webbrowser.open(authorize_url, new=1, autoraise=True)
print 'Your browser has been opened to visit:'
print
print ' ' + authorize_url
print
print 'If your browser is on a different machine then exit and re-run this'
print 'application with the command-line parameter '
print
print ' --noauth_local_webserver'
print
else:
print 'Go to the following link in your browser:'
print
print ' ' + authorize_url
print
code = None
if not flags.noauth_local_webserver:
httpd.handle_request()
if 'error' in httpd.query_params:
sys.exit('Authentication request was rejected.')
if 'code' in httpd.query_params:
code = httpd.query_params['code']
else:
print 'Failed to find "code" in the query parameters of the redirect.'
sys.exit('Try running with --noauth_local_webserver.')
else:
code = raw_input('Enter verification code: ').strip()
try:
credential = flow.step2_exchange(code, http=http)
except client.FlowExchangeError, e:
sys.exit('Authentication has failed: %s' % e)
storage.put(credential)
credential.set_store(storage)
print 'Authentication successful.'
return credential
def message_if_missing(filename):
"""Helpful message to display if the CLIENT_SECRETS file is missing."""
return _CLIENT_SECRETS_MESSAGE % filename
try:
from old_run import run
except ImportError:
def run(*args, **kwargs):
raise NotImplementedError(
'The gflags library must be installed to use tools.run(). '
'Please install gflags or preferrably switch to using '
'tools.run_flow().')
I don't understand the meaning of the error; it may be an import issue, but I don't know.
Thanks!
You need to make sure the oauth2client tools are set up properly:
python setup_oauth2client.py install
It's in the base directory.
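If that doesn't help, a quick sanity check (my own suggestion, not part of the answer above) is to confirm which oauth2client copy is actually being imported and whether it exposes tools.argparser, since older gflags-based releases may not have it:

# Sanity check: which oauth2client is imported, and does it have tools.argparser?
import oauth2client
from oauth2client import tools

print(oauth2client.__file__)
print(getattr(tools, 'argparser', 'no argparser -- this oauth2client is too old'))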
I have the logging module's MemoryHandler set up to queue debug and error messages for an SMTPHandler target. What I want is for an email to be sent when the process hits an error, containing all the debug statements up to that point (one per line). What I get instead is a separate email for every debug message.
This seems like it should be trivial, and part of the logging package, but I can't find anything about it, no examples, nothing on Google.
log = logging.getLogger()
log.setLevel(logging.DEBUG)
debug_format = logging.Formatter("%(levelname)s at %(asctime)s in %(filename)s (line %(lineno)d):: %(message)s")
# write errors to email
error_mail_subject = "ERROR: Script error in %s on %s" % (sys.argv[0], os.uname()[1])
error_mail_handler = logging.handlers.SMTPHandler(SMTP_HOST, 'errors@'+os.uname()[1], [LOG_EMAIL], error_mail_subject)
error_mail_handler.setLevel(logging.ERROR)
#error_mail_handler.setLevel(logging.DEBUG)
error_mail_handler.setFormatter(debug_format)
# buffer debug messages so they can be sent with error emails
memory_handler = logging.handlers.MemoryHandler(1024*10, logging.ERROR, error_mail_handler)
memory_handler.setLevel(logging.DEBUG)
# attach handlers
log.addHandler(memory_handler)
log.addHandler(error_mail_handler)
Related to this:
Do I need to add the error_mail_handler to the logger explicitly if it is a target of memory_handler anyway?
Should error_mail_handler's level be set to DEBUG or ERROR? Does it even need a level when it is being fed from memory_handler?
Would love to see some working code from anyone who has solved this problem.
You might want to use or adapt the BufferingSMTPHandler which is in this test script.
In general, you don't need to add a handler to a logger if it's the target of a MemoryHandler handler which has been added to a logger. If you set the level of a handler, that will affect what the handler actually processes - it won't process anything which is less severe than its level setting.
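Applied to the setup in the question, that means dropping the second addHandler call; a minimal sketch of the attachment, reusing the names from the code above:

# error_mail_handler is reached only through memory_handler's flush
# (triggered by the first ERROR record or a full buffer), so it is not
# attached to the logger directly.
log.addHandler(memory_handler)
# no log.addHandler(error_mail_handler)

Note that the stock SMTPHandler still sends one email per record it is handed, so a flush of N buffered records produces N emails; getting the whole buffer into a single message is exactly what the BufferingSMTPHandler mentioned above does.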
Instead of buffering for email, consider posting unbuffered to a message stream on a messaging app, e.g. on Matrix, Discord, Slack, etc. Having said that, I wrote my own beastly thread-safe implementation of BufferingSMTPHandler (backup link) which sends emails from a separate thread. The primary goal is to not block the main thread.
As written, it uses two queues - this seemed necessary in order to implement some useful class-level parameters that are defined in the "Configurable parameters" section of the code. Although you can use the code as-is, it's probably better if you study and use it to write your own class.
Issues:
Some class-level parameters can perhaps be instance-level instead.
Either threading.Timer or the signal module could perhaps be used to avoid loops that run forever.
If you are using Django, here is a simple buffering handler which will use the standard Django email methods:
import logging
import logging.handlers
from django.conf import settings
from django.core.mail import EmailMessage
class DjangoBufferingSMTPHandler(logging.handlers.BufferingHandler):
def __init__(self, capacity, toaddrs=None, subject=None):
logging.handlers.BufferingHandler.__init__(self, capacity)
if toaddrs:
self.toaddrs = toaddrs
else:
# Send messages to site administrators by default
self.toaddrs = list(zip(*settings.ADMINS))[-1]  # list() so it also works on Python 3
if subject:
self.subject = subject
else:
self.subject = 'logging'
def flush(self):
if len(self.buffer) == 0:
return
try:
msg = "\r\n".join(map(self.format, self.buffer))
emsg = EmailMessage(self.subject, msg, to=self.toaddrs)
emsg.send()
except Exception:
# handleError() will print exception info to stderr if logging.raiseExceptions is True
self.handleError(record=None)
self.buffer = []
In django settings.py you will need to configure email and logging like this:
EMAIL_USE_TLS = True
EMAIL_PORT = 25
EMAIL_HOST = '' # example: 'smtp.yandex.ru'
EMAIL_HOST_USER = '' # example: 'user@yandex.ru'
EMAIL_HOST_PASSWORD = ''
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
SERVER_EMAIL = EMAIL_HOST_USER
LOGGING = {
'handlers': {
...
'mail_buffer': {
'level': 'WARN',
'capacity': 9999,
'class': 'utils.logging.DjangoBufferingSMTPHandler',
# optional:
# 'toaddrs': 'admin@host.com'
# 'subject': 'log messages'
}
},
...
}
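With that configured, a minimal usage sketch (the logger name and the 'loggers' entry are illustrative, assuming LOGGING also routes a logger to 'mail_buffer'):

import logging

# e.g. LOGGING['loggers'] = {'myapp': {'handlers': ['mail_buffer'], 'level': 'WARN'}}
logger = logging.getLogger('myapp')
logger.warning('something went wrong')  # buffered; emailed when capacity is reached or when logging shuts down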
For this purpose I use the BufferingSMTPHandler suggested by Vinay Sajip, with one minor tweak: I set the buffer length to something really big (say 5000 log records) and manually call the handler's flush method every few seconds, after checking for internet connectivity.
# init
log_handler1 = BufferingSMTPHandler(
'smtp.host.lala', "from@test.com", ['to@test.com'], 'Log event(s)',5000)
...
logger.addHandler(log_handler1)
...
# main code
...
if internet_connection_ok and seconds_since_last_flush>60:
log_handler1.flush() # send buffered log records (if any)
Updated Vinay Sajip's answer for python3.
import logging
from logging.handlers import BufferingHandler
class BufferingSMTPHandler(BufferingHandler):
def __init__(self, mailhost, fromaddr, toaddrs, subject, capacity):
logging.handlers.BufferingHandler.__init__(self, capacity)
self.mailhost = mailhost
self.mailport = None
self.fromaddr = fromaddr
self.toaddrs = toaddrs
self.subject = subject
self.setFormatter(logging.Formatter("%(asctime)s %(levelname)-5s %(message)s"))
def flush(self):
if len(self.buffer) > 0:
try:
import smtplib
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port)
msg = '''From: {}\r\nTo: {}\r\nSubject: {}\r\n\r\n'''.format(
self.fromaddr,
",".join(self.toaddrs),
self.subject
)
for record in self.buffer:
s = self.format(record)
print (s)
msg = msg + s + "\r\n"
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except:
self.handleError(None) # no particular record
self.buffer = []
# update for @Anant
if __name__ == '__main__':
buff_smtp_handler=BufferingSMTPHandler(...your args)
buff_smtp_handler.setLevel(logging.ERROR)
handlers=[buff_smtp_handler]
logging.basicConfig(handlers=handlers)
I think the point of the SMTP logger is that it is meant to send out a significant log message, functioning as some kind of alert if sent to a human recipient, or to be further processed by an automated recipient.
If a collection of log messages is to be sent by email, that constitutes a report sent at the end of a task's execution, and writing that log to a file and then emailing the file seems like a reasonable solution.
I took a look at the basic FileHandler log handler and how to build a mechanism that writes to a temp file and then attaches that temp file when the script exits.
I found the atexit module, which allows a method to be registered that will be executed when the script is exiting.
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
import os
from email import encoders
import uuid
# atexit allows for a method to be set to handle an object when the script exits
import atexit
filename = uuid.uuid4().hex
class MailLogger:
def __init__(self, filePath, smtpDict):
self.filePath = filePath
self.smtpDict = smtpDict
# Generate random file name
filename = '%s.txt' % ( uuid.uuid4().hex )
# Create full filename
filename = '%s/%s' % (filePath,filename)
self.filename = filename
self.fileLogger = logging.getLogger('mailedLog')
self.fileLogger.setLevel(logging.INFO)
self.fileHandler = logging.FileHandler(filename)
self.fileHandler.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
self.fileHandler.setFormatter(formatter)
self.fileLogger.addHandler(self.fileHandler)
atexit.register(self.mailOut)
def mailOut(self):
'''
Script is exiting so time to mail out the log file
"emailSettings": {
"smtpServer" : "smtp.dom.com",
"smtpPort" : 25,
"sender" : "sender#dom.com>",
"recipients" : [
"recipient#dom.com"
],
"subject" : "Email Subject"
},
'''
# Close the file handler
smtpDict = self.smtpDict
self.fileHandler.close()
msg = MIMEMultipart('alternative')
s = smtplib.SMTP(smtpDict["smtpServer"], smtpDict["smtpPort"] )
msg['Subject'] = smtpDict["subject"]
msg['From'] = smtpDict["sender"]
msg['To'] = ','.join(smtpDict["recipients"])
body = 'See attached report file'
content = MIMEText(body, 'plain')
msg.attach(content)
attachment = MIMEBase('application', 'octet-stream')
attachment.set_payload(open(self.filename, 'rb').read())
encoders.encode_base64(attachment)
attachment.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(self.filename))
msg.attach(attachment)
s.send_message(msg)
s.quit()
My basic test script is:
from EmailLogRpt import MailLogger
import time
smtpDict = {
"smtpServer" : "smtp.dom.com",
"smtpPort" : 25,
"sender" : "sender#dom.com",
"recipients" : [
"recpient#dom.com>"
],
"subject" : "Email Subject"
}
myMailLogger = MailLogger("/home/ed/tmp",smtpDict).fileLogger
myMailLogger.info("test msg 1")
time.sleep(5)
myMailLogger.info("test msg 2")
Hope this helps somebody.