Is it possible to fetch many objects together from Google App Engine? - python

We have about 5,000 objects of class Domain in Google App Engine, and we want to export the list of domains to CSV. Each domain is linked to an object of class DomainStateData:
class DomainStateData(db.Expando, ExpandoEntity):
# Billing/plan state for one Domain; keyed by the same key name as its
# Domain (see Domain.state_data, which looks it up via get_by_key_name).
plan = db.ReferenceProperty(Plan)
plan_expiration = db.DateTimeProperty()
trial_expiration = db.DateTimeProperty()
date_created = db.DateTimeProperty(auto_now_add=True, indexed=True)  # set once on first put()
last_modified = db.DateTimeProperty(auto_now=True)  # refreshed on every put()
class Domain(db.Expando, ExpandoEntity, SocialIconsEntity):
    """
    Domain Model.

    Each Domain is lazily linked to a DomainStateData entity sharing its
    key name; ``state_data`` creates and persists that entity on first
    access if it does not exist yet.
    """
    domain = db.StringProperty(required=True)
    # ... (other properties elided in the original post)
    _state_data = db.ReferenceProperty(DomainStateData)

    @property  # BUG FIX: the post showed "#property" (Markdown ate the "@")
    def state_data(self):
        """Return the linked DomainStateData, creating it on demand.

        Also rebuilds the link when the reference is broken
        (ReferencePropertyResolveError), e.g. the target was deleted.
        """
        try:
            if not self._state_data:
                # Try to fetch an existing entity keyed like this domain;
                # build and persist one if it is missing.
                sd = DomainStateData.get_by_key_name(self.key().name())
                if not sd:
                    sd = DomainStateData(key_name=self.key().name())
                    # BUG FIX: the original assigned the result of put()
                    # (a Key) to the reference; keep the entity itself.
                    sd.put()
                self._state_data = sd
                self.put()
            return self._state_data
        except ReferencePropertyResolveError:
            # Dangling reference: rebuild the state-data entity.
            sd = DomainStateData(key_name=self.key().name())
            sd.put()
            self._state_data = sd
            self.put()
            return self._state_data
I wrote code that exports 100 domains to CSV (it takes 5 seconds), but if I try to fetch all 5,000 domains I get a timeout, which is 60 seconds. Is it possible to fetch all the DomainStateData objects together without a timeout? Here is my code that exports the domains to CSV:
import sys
sys.path.insert(0, 'libs')
import webapp2
import datetime
import csv
from models import Domain
class ExportAllDomainsToCsvHandler(webapp2.RequestHandler):
    """Serve the first 100 domains (with state data) as a CSV download."""

    @staticmethod
    def _expiration_sort_key(domain):
        """Sort key: None expirations first, then by date, then name."""
        sd = domain.state_data
        return (0 if sd.plan_expiration is None else 1, sd.plan_expiration,
                0 if sd.trial_expiration is None else 1, sd.trial_expiration,
                domain.domain)

    @staticmethod
    def _fmt_expiration(expiration, missing_msg):
        """Format a datetime as YYYY-MM-DD, or a placeholder when None."""
        return missing_msg if expiration is None else expiration.strftime('%Y-%m-%d')

    def get(self):
        self.response.headers['Content-Type'] = 'text/csv'
        self.response.headers['Content-Disposition'] = (
            'attachment; filename="All Domains [{0}].csv"'.format(str(datetime.date.today())))
        writer = csv.writer(self.response.out)
        writer.writerow(["Domain", "Current state", "Plan expiration date",
                         "Trial expiration date", "Current oauth user"])
        # NOTE: deliberately limited to 100 -- every state_data access is a
        # separate datastore get, and ~5,000 of them blows the 60 s deadline.
        all_domains = Domain.all().fetch(100)
        all_domains.sort(key=self._expiration_sort_key)
        for domain in all_domains:
            sd = domain.state_data  # resolve the reference once per row
            writer.writerow([domain.domain,
                             domain.cur_state.name,
                             self._fmt_expiration(sd.plan_expiration, "No plan expiration date"),
                             self._fmt_expiration(sd.trial_expiration, "No trial expiration date"),
                             domain.admin])
# WSGI entry point: route the CSV-export URL to the handler above.
app = webapp2.WSGIApplication([
("/csv/export_all_domains_to_csv", ExportAllDomainsToCsvHandler)
], debug=True)

OK, I found a solution. I fetched all the DomainStateData objects directly from the database and now it takes 35 seconds to create the CSV with all the domains. Here is my code, I didn't change the models:
import sys
sys.path.insert(0, 'libs')
import webapp2
import datetime
import csv
from models import DomainStateData, Domain
class ExportAllDomainsToCsvHandler(webapp2.RequestHandler):
    """Export ALL domains as CSV.

    Both kinds are bulk-fetched and joined in memory by key name, instead
    of dereferencing Domain.state_data per row (one datastore get per
    domain -- the cause of the 60 s timeout in the original version).
    """

    def get(self):
        self.response.headers['Content-Type'] = 'text/csv'
        self.response.headers['Content-Disposition'] = (
            'attachment; filename="All Domains [{0}].csv"'.format(str(datetime.date.today())))
        writer = csv.writer(self.response.out)
        writer.writerow(["Domain", "Current state", "Plan expiration date",
                         "Trial expiration date", "Current oauth user"])

        # One query per kind instead of one get per domain.
        state_data_by_name = {}
        for sd in DomainStateData.all().fetch(1000000):
            state_data_by_name[sd.key().name()] = sd
        all_domains = Domain.all().fetch(1000000)

        def state_data_for(domain):
            # BUG FIX: the original cached the joined entity on
            # "domain.__state_data" inside this class; name mangling turned
            # that into "_ExportAllDomainsToCsvHandler__state_data" and any
            # domain without a match raised AttributeError at sort time.
            # Look it up in the dict and fall back to the (lazily creating)
            # model property for the rare missing entry.
            sd = state_data_by_name.get(domain.key().name())
            return sd if sd is not None else domain.state_data

        def sort_key(domain):
            sd = state_data_for(domain)
            return (0 if sd.plan_expiration is None else 1, sd.plan_expiration,
                    0 if sd.trial_expiration is None else 1, sd.trial_expiration,
                    domain.domain)

        all_domains.sort(key=sort_key)
        for domain in all_domains:
            sd = state_data_for(domain)
            if sd.plan_expiration is None:
                plan_expiration = "No plan expiration date"
            else:
                plan_expiration = sd.plan_expiration.strftime('%Y-%m-%d')
            if sd.trial_expiration is None:
                trial_expiration = "No trial expiration date"
            else:
                trial_expiration = sd.trial_expiration.strftime('%Y-%m-%d')
            writer.writerow([domain.domain, domain.cur_state.name,
                             plan_expiration, trial_expiration, domain.admin])
# WSGI entry point: route the CSV-export URL to the handler above.
app = webapp2.WSGIApplication([
("/csv/export_all_domains_to_csv", ExportAllDomainsToCsvHandler)
], debug=True)

Related

Importing Json into Django Database from url only imports parts of the data and says the rest is up to date but it isn't

I wrote a script which will basically import data from a json file and store it in the database of my Django Application.
I'm simply pointing to a json file and i'm adding "facilities" if they don't exist or they get updated if the last modified date changes.
It worked perfectly fine until i couldn't import the json file locally anymore and made some smaller changes to use an external json file.
When i run the importer now it will only import the first two facilities and then tell me that all others are up to date even though they don't exist. I even modified and tested the json file manually to make sure it is not caused by a bug inside the json file.
I will add the old code plus the modified code below.
One of the main differences is that this part is now at the very bottom in the new version after the if statement:
for key, data_object in data.items():
And also that i'm using "import requests" in the new version with the following code at the bottom of the file. My feeling is that i made a mistake right there:
# NOTE(review): excerpt of the management command's entry point, shown in
# the question body; the full class follows below.
def handle(self, *args, **options):
"""
Call the function to import data from json url
"""
headers = {'Content-Type': 'application/json'}
# Fetch the whole feed, then import each top-level entry.
response = requests.get(
url=IMPORT_URL,
headers=headers,
)
data = response.json()
for key, data_object in data.items():
self.import_facility_from_file(data_object)
New Version:
import requests
import json
from leads.models import Facility, FacilityAddress, FacilityInspectionInfo, FacilityComplaints
from django.core.management.base import BaseCommand
IMPORT_URL = 'https://importerdomain.test/test.json'
class Command(BaseCommand):
    """Import facilities (and related records) from a remote JSON feed.

    The feed is a mapping of {facility_key: facility_data}.  A facility is
    skipped when an unchanged copy already exists locally; otherwise the
    Facility and its address/inspection/complaints rows are created or
    updated.
    """

    def import_facility_from_file(self, data):
        """Create/update one Facility (and related rows) from a feed entry."""
        UUID = data.get('UUID', None)
        Name = data.get('Name', None)
        IssuedNumber = data.get('IssuedNumber', None)
        Capacity = data.get('Capacity', None)
        Licensee = data.get('Licensee', None)
        Email = data.get('Email', None)
        AdministratorName = data.get('AdministratorName', None)
        TelephoneNumber = data.get('TelephoneNumber', None)
        ClosedTimestamp = data.get('ClosedTimestamp', None)
        MostRecentLicenseTimestamp = data.get('MostRecentLicenseTimestamp', None)
        ImporterLastModifiedTimestamp = data.get('ImporterLastModifiedTimestamp', None)
        address = data["AddressInfo"]
        inspection = data["InspectionInfo"]
        complaints = data["Complaints"]

        # BUG FIX: the original filtered on the timestamp alone, so ANY
        # facility sharing this ImporterLastModifiedTimestamp made the
        # current record look "up to date" and it was never imported.
        # The check must be scoped to this facility's UUID as well.
        if Facility.objects.filter(
                UUID=UUID,
                ImporterLastModifiedTimestamp=ImporterLastModifiedTimestamp).exists():
            print("\n\nfacility exists and is up to date: {}\n{}".format(Name, str()))
            return

        print("UPDATE")
        facility, facility_created = Facility.objects.update_or_create(
            UUID=UUID,
            defaults={
                'Name': Name,
                'IssuedNumber': IssuedNumber,
                'Capacity': Capacity,
                'Licensee': Licensee,
                'Email': Email,
                'AdministratorName': AdministratorName,
                'TelephoneNumber': TelephoneNumber,
                'ClosedTimestamp': ClosedTimestamp,
                'MostRecentLicenseTimestamp': MostRecentLicenseTimestamp,
                'ImporterLastModifiedTimestamp': ImporterLastModifiedTimestamp,
            })
        # Related one-to-one rows, each keyed by the facility.
        FacilityAddress.objects.update_or_create(
            AddressInfo=facility,
            defaults={
                'PrimaryAddress': address["PrimaryAddress"],
                'SecondaryAddress': address["SecondaryAddress"],
                'City': address["City"],
                'RegionOrState': address["RegionOrState"],
                'PostalCode': address["PostalCode"],
                'Geolocation': address["Geolocation"],
                'AddressInfo': facility,
            })
        FacilityInspectionInfo.objects.update_or_create(
            InspectionInfo=facility,
            defaults={
                'ComplaintRelatedVisits': inspection["ComplaintRelatedVisits"],
                'InspectionRelatedVisits': inspection["InspectionRelatedVisits"],
                'NumberOfVisits': inspection["NumberOfVisits"],
                'LastVisitTimestamp': inspection["LastVisitTimestamp"],
                'InspectionInfo': facility,
            })
        FacilityComplaints.objects.update_or_create(
            Complaints=facility,
            defaults={
                'ComplaintsTypeA': complaints["ComplaintsTypeA"],
                'ComplaintsTypeB': complaints["ComplaintsTypeB"],
                'SubstantiatedAllegations': complaints["SubstantiatedAllegations"],
                'TotalAllegations': complaints["TotalAllegations"],
                'Complaints': facility,
            })

    def handle(self, *args, **options):
        """
        Call the function to import data from json url
        """
        headers = {'Content-Type': 'application/json'}
        response = requests.get(
            url=IMPORT_URL,
            headers=headers,
        )
        data = response.json()
        for key, data_object in data.items():
            self.import_facility_from_file(data_object)
Old Version
import os
import json
import traceback
from data_import.models import Facility, FacilityAddress, FacilityInspectionInfo, FacilityComplaints
from django.core.management.base import BaseCommand
from datetime import datetime
from jsontest.settings import BASE_DIR, STATIC_URL
class Command(BaseCommand):
    """Import facilities from the JSON files under
    <BASE_DIR>/import_data/resources (legacy, file-based importer)."""

    def import_facility_from_file(self):
        """Walk the resources folder and import every facility found."""
        print("BASE", BASE_DIR)
        data_folder = os.path.join(BASE_DIR, 'import_data', 'resources')
        # BUG FIX: the original shadowed the loop variable ("data_file")
        # with the opened file handle; use distinct names.
        for file_name in os.listdir(data_folder):
            with open(os.path.join(data_folder, file_name), encoding='utf-8') as fh:
                data = json.loads(fh.read())
            for key, data_object in data.items():
                UUID = data_object.get('UUID', None)
                Name = data_object.get('Name', None)
                IssuedNumber = data_object.get('IssuedNumber', None)
                Capacity = data_object.get('Capacity', None)
                Licensee = data_object.get('Licensee', None)
                Email = data_object.get('Email', None)
                AdministratorName = data_object.get('AdministratorName', None)
                TelephoneNumber = data_object.get('TelephoneNumber', None)
                ClosedTimestamp = data_object.get('ClosedTimestamp', None)
                MostRecentLicenseTimestamp = data_object.get('MostRecentLicenseTimestamp', None)
                # These sub-records are not persisted by this legacy version,
                # but the lookups are kept so malformed entries still fail
                # loudly (KeyError) as before.
                PrimaryAddress = data_object["AddressInfo"]["PrimaryAddress"]
                SecondaryAddress = data_object["AddressInfo"]["SecondaryAddress"]
                City = data_object["AddressInfo"]["City"]
                RegionOrState = data_object["AddressInfo"]["RegionOrState"]
                PostalCode = data_object["AddressInfo"]["PostalCode"]
                Geolocation = data_object["AddressInfo"]["Geolocation"]
                ComplaintRelatedVisits = data_object["InspectionInfo"]["ComplaintRelatedVisits"]
                InspectionRelatedVisits = data_object["InspectionInfo"]["InspectionRelatedVisits"]
                NumberOfVisits = data_object["InspectionInfo"]["NumberOfVisits"]
                LastVisitTimestamp = data_object["InspectionInfo"]["LastVisitTimestamp"]
                ComplaintsTypeA = data_object["Complaints"]["ComplaintsTypeA"]
                ComplaintsTypeB = data_object["Complaints"]["ComplaintsTypeB"]
                SubstantiatedAllegations = data_object["Complaints"]["SubstantiatedAllegations"]
                TotalAllegations = data_object["Complaints"]["TotalAllegations"]
                LatestUpdateTimestamp = data_object.get('LatestUpdateTimestamp', None)
                # BUG FIX: filter on UUID as well -- matching on the
                # timestamp alone makes ANY facility with the same
                # timestamp mark this one as "up to date".
                if Facility.objects.filter(
                        UUID=UUID,
                        LatestUpdateTimestamp=LatestUpdateTimestamp).exists():
                    print("\n\nfacility exists and is up to date: {}\n{}".format(Name, str()))
                else:
                    print("UPDATE")
                    facility, facility_created = Facility.objects.update_or_create(
                        UUID=UUID,
                        defaults={
                            'Name': Name,
                            'IssuedNumber': IssuedNumber,
                            'Capacity': Capacity,
                            'Licensee': Licensee,
                            'Email': Email,
                            'AdministratorName': AdministratorName,
                            'TelephoneNumber': TelephoneNumber,
                            'ClosedTimestamp': ClosedTimestamp,
                            'MostRecentLicenseTimestamp': MostRecentLicenseTimestamp,
                            'LatestUpdateTimestamp': LatestUpdateTimestamp,
                        })

    def handle(self, *args, **options):
        """
        Call the function to import data
        """
        self.import_facility_from_file()
The json i'm importing
{"00016ed7be4872a19d6e16afc98a7389b2bb324a2":
{"UUID":"00016ed7be4872a19d6e1ed6f36b647f3eb41cadedd2130b103a5851caebc26fbbbf24c2f1a64d2cf34ac4e03aaa30309816f58c397e6afc98a7389b2bb324a2","Name":"Test Facility","IssuedNumber":"123456","Licensee":"Test Licensee","Email":"test#example.com","AdministratorName":"Test Name","TelephoneNumber":"(123) 456-7890324879","ImporterLastModifiedTimestamp":"1362985200",
"AddressInfo":{"PrimaryAddress":"123 Fake Road","SecondaryAddress":"","City":"Testcity","RegionOrState":"TX","PostalCode":"12345","Geolocation":"00.0000,-00.0000"},"Capacity":100,"MostRecentLicenseTimestamp":1575180000,"ClosedTimestamp":0,
"InspectionInfo":{"ComplaintRelatedVisits":0,"InspectionRelatedVisits":0,"NumberOfVisits":0,"LastVisitTimestamp":0},
"Complaints":{"ComplaintsTypeA":0,"ComplaintsTypeB":0,"SubstantiatedAllegations":0,"TotalAllegations":0}},
"00016ed7be4872a15435435435b2bb324a2":
{"UUID":"000c93dcb7a0b3d5783bb330892aff6abdb9fb57a7d3701c2d903f3640877579f3173ecd8a80532f6c3d53dbacde78a6a54ae42fef321a5793f5a01934f8de7a","Name":"Test Facility 2","IssuedNumber":"123456","Licensee":"Test Licensee","Email":"test#example.com","AdministratorName":"Test Name","TelephoneNumber":"(123) 456-7890324879","ImporterLastModifiedTimestamp":"1362985200",
"AddressInfo":{"PrimaryAddress":"123 Fake Road","SecondaryAddress":"","City":"Testcity","RegionOrState":"TX","PostalCode":"12345","Geolocation":"00.0000,-00.0000"},"Capacity":100,"MostRecentLicenseTimestamp":1575180000,"ClosedTimestamp":0,
"InspectionInfo":{"ComplaintRelatedVisits":0,"InspectionRelatedVisits":0,"NumberOfVisits":0,"LastVisitTimestamp":0},
"Complaints":{"ComplaintsTypeA":0,"ComplaintsTypeB":0,"SubstantiatedAllegations":0,"TotalAllegations":0}},
"00234324324343243afc98a7389b2bb324a2":
{"UUID":"fffd4dec10054e6e1deb2a2266a7c6bb0136ba46222e734ceed5855651f735cfbe0bb66cfaf27c3d175ae261a8f6df0c36b5390d15c70b07d67e35e1081aaf6d","Name":"Test Facility 3","IssuedNumber":"123456","Licensee":"Test Licensee","Email":"test#example.com","AdministratorName":"Test Name","TelephoneNumber":"(123) 456-7890324879","ImporterLastModifiedTimestamp":"1362985200",
"AddressInfo":{"PrimaryAddress":"123 Fake Road","SecondaryAddress":"","City":"Testcity","RegionOrState":"TX","PostalCode":"12345","Geolocation":"00.0000,-00.0000"},"Capacity":100,"MostRecentLicenseTimestamp":1575180000,"ClosedTimestamp":0,
"InspectionInfo":{"ComplaintRelatedVisits":0,"InspectionRelatedVisits":0,"NumberOfVisits":0,"LastVisitTimestamp":0},
"Complaints":{"ComplaintsTypeA":0,"ComplaintsTypeB":0,"SubstantiatedAllegations":0,"TotalAllegations":0}}}
if Facility.objects.filter(ImporterLastModifiedTimestamp=ImporterLastModifiedTimestamp):
The if statement above evaluates to True as soon as any previously inserted object has the same timestamp.
You need to filter on both UUID and timestamp to select the single object you want to check for changes.

Lambda function to check if specific tag do NOT exists-python

I'm trying to get following:
Get all EC2 instances that either:
are Tagged with tag Owner and value Unknown or unknown
are missing tag Owner
I'm able to accomplish 1) but no idea how to get 2)
import boto3
import collections
import datetime
import time
import sys
ec = boto3.client('ec2', 'eu-west-1')
ec2 = boto3.resource('ec2', 'eu-west-1')
def lambda_handler(event, context):
    """Collect the ids of instances whose Owner tag is Unknown/unknown
    and report them."""
    matched = ec.describe_instances(
        Filters=[
            {'Name': 'tag:Owner', 'Values': ['Unknown', 'unknown']},
        ]
    ).get('Reservations', [])
    # Flatten reservations -> instances -> ids in one pass.
    instance_ids = [
        inst['InstanceId']
        for res in matched
        for inst in res['Instances']
    ]
    print("Stopping instances: {}".format(','.join(instance_ids)))
Like I said in the comment you want to forgo the Owner filter so your response includes instances without Owner tag as well, and then you get to filtering locally.
# Fetch ALL reservations (no server-side Owner filter) so instances that
# have no Owner tag at all are included, then classify locally.
reservations = ec.describe_instances().get('Reservations', [])
for reservation in reservations:
for instance in reservation['Instances']:
tags = {}
# Flatten the [{'Key': .., 'Value': ..}] tag list into a dict.
# NOTE(review): an instance with no tags at all has no 'Tags' key,
# which would raise KeyError here -- confirm against real data.
for tag in instance['Tags']:
tags[tag['Key']] = tag['Value']
if not 'Owner' in tags:
print instance['InstanceId'] + " does not have Owner tag"
elif tags['Owner'] in ['Unknown', 'unknown']:
print instance['InstanceId'] + " has [U|u]nknown Owner tag"
If you have a large number of instances in your account, the response to describe_instances may be paginated, and you'll have to deal with that as well.
Combining code from my question and #Rage answer i managed to get what i want
Thanks again RaGe !!
# Python 2 Lambda script: stop instances with missing/Unknown Owner tags,
# tag them with a TerminateOn date, and terminate them when that date
# arrives.  (Indentation was flattened by the post; structure inferred.)
import boto3
import collections
import datetime
import time
import sys
# SES client and e-mail placeholders (values redacted in the post).
ses = boto3.client('ses')
email_from = 'Email'
email_to = 'Email'
email_cc = 'Email'
emaiL_subject = 'Subject'
email_body = 'Body'
ec = boto3.client('ec2', 'eu-west-1')
ec2 = boto3.resource('ec2', 'eu-west-1')
# NOTE: this import shadows the "datetime" module imported above.
from datetime import datetime
from dateutil.relativedelta import relativedelta
#create date variables
date_after_month = datetime.now()+ relativedelta(days=7)
#date_after_month.strftime('%d/%m/%Y')
today=datetime.now().strftime('%d/%m/%Y')
def lambda_handler(event, context):
#Get instances with Owner Taggs and values Unknown/known
instance_ids = []
reservations = ec.describe_instances().get('Reservations', [])
for reservation in reservations:
for instance in reservation['Instances']:
tags = {}
for tag in instance['Tags']:
tags[tag['Key']] = tag['Value']
if not 'Owner' in tags or tags['Owner']=='unknown' or tags['Owner']=='Unknown':
instance_ids.append(instance['InstanceId'])
#Check if "TerminateOn" tag exists:
if 'TerminateOn' in tags:
#compare TerminteOn value with current date
if tags["TerminateOn"]==today:
#Check if termination protection is enabled
terminate_protection=ec.describe_instance_attribute(InstanceId =instance['InstanceId'] ,Attribute = 'disableApiTermination')
protection_value=(terminate_protection['DisableApiTermination']['Value'])
#if enabled disable it
if protection_value == True:
ec.modify_instance_attribute(InstanceId=instance['InstanceId'],Attribute="disableApiTermination",Value= "False" )
#terminate instance
# NOTE(review): this passes the WHOLE accumulated instance_ids list,
# not just the current instance -- earlier-collected instances whose
# TerminateOn has not arrived are terminated too.  Confirm intent.
ec.terminate_instances(InstanceIds=instance_ids)
print "terminated" + str(instance_ids)
#send email that instance is terminated
else:
#Send an email to engineering that this instance will be removed X amount of days (calculate the date based on today's date and the termination date."
now=datetime.now()
future=tags["TerminateOn"]
TerminateOn = datetime.strptime(future, "%d/%m/%Y")
days= (TerminateOn-now).days
print str(instance_ids) + " will be removed in "+ str(days) + " days"
else:
if not 'TerminateOn' in tags:#, create it
# NOTE(review): same list-wide issue -- tags/stops every collected id.
ec2.create_tags(Resources=instance_ids,Tags=[{'Key':'TerminateOn','Value':date_after_month.strftime('%d/%m/%Y')}])
ec.stop_instances(InstanceIds=instance_ids)
print "was shut down "+format(','.join(instance_ids))

How to change the printed qweb filename

How to change the printed qweb filename depends on the source document field name?
Im in the model stock.picking
So when I click Print -> Delivery note, the qweb report is printed, but the filename should depend on the Source document field.
Here i present the picture that explain what i mean.
EXAMPLE
You can give dynamic report name using configuration, but it will apply when you print one report.
Below is an example of printing a custom name in a report. Create one field in ir.actions.report.xml in which the user can configure the report name.
from openerp import models, fields
class IrActionsReportXml(models.Model):
    """Extend report actions with a user-configurable download file name."""

    _inherit = 'ir.actions.report.xml'

    # Template string rendered against the printed records (e.g. "${o.name}");
    # consumed by the report controllers below.
    download_filename = fields.Char('Download filename')
Now you need to create two files.
Report Controller
from openerp import http
from openerp.addons.mail.models import mail_template
from openerp.addons.report.controllers.main import ReportController
from openerp.addons.web.controllers.main import content_disposition
class ReportController(ReportController):
    """Override the report routes so a report action with a configured
    ``download_filename`` gets a rendered Content-Disposition file name."""

    @http.route([  # BUG FIX: the post showed "#http.route" (Markdown ate the "@")
        '/report/<path:converter>/<reportname>',
        '/report/<path:converter>/<reportname>/<docids>',
    ])
    def report_routes(self, reportname, docids=None, converter=None, **data):
        """Render the report, then replace the download file name with the
        template configured on the matching ir.actions.report.xml."""
        response = super(ReportController, self).report_routes(
            reportname, docids=docids, converter=converter, **data)
        if docids:
            docids = [int(i) for i in docids.split(',')]
            report_xml = http.request.session.model('ir.actions.report.xml')
            report_ids = report_xml.search(
                [('report_name', '=', reportname)])
            for report in report_xml.browse(report_ids):
                if not report.download_filename:
                    continue
                objects = http.request.session.model(report.model)\
                    .browse(docids or [])
                # Render the configured filename template against the records.
                generated_filename = mail_template.mako_template_env\
                    .from_string(report.download_filename)\
                    .render({
                        'objects': objects,
                        'o': objects[:1],
                        'object': objects[:1],
                        'ext': report.report_type.replace('qweb-', ''),
                    })
                response.headers['Content-Disposition'] = content_disposition(
                    generated_filename)
        return response

    @http.route(['/report/download'])
    def report_download(self, data, token):
        """Keep only the last Content-Disposition header, dropping any
        duplicate added by super()."""
        response = super(ReportController, self).report_download(data, token)
        last_index = None
        for i in range(len(response.headers) - 1, -1, -1):
            if response.headers[i][0] == 'Content-Disposition':
                # BUG FIX: "if last_index:" treated index 0 as "not set";
                # compare against None explicitly.
                if last_index is not None:
                    response.headers.pop(last_index)
                last_index = i
        return response
2.Report.py
import json
from openerp import http
from openerp.addons.web.controllers import main
from openerp.addons.mail.models import mail_template
class Reports(main.Reports):
    """Override /web/report so reports with a configured download_filename
    get a rendered attachment name."""

    @http.route('/web/report', type='http', auth="user")  # BUG FIX: "@" was shown as "#" in the post
    @main.serialize_exception
    def index(self, action, token):
        """Serve the report, then rewrite Content-Disposition from the
        configured filename template."""
        result = super(Reports, self).index(action, token)
        action = json.loads(action)
        context = dict(http.request.context)
        context.update(action["context"])
        report_xml = http.request.env['ir.actions.report.xml']
        reports = report_xml.search([
            ('report_name', '=', action['report_name']),
            ('download_filename', '!=', False)])
        for report in reports:
            objects = http.request.session.model(context['active_model'])\
                .browse(context['active_ids'])
            # NOTE(review): objects[0] raises IndexError when active_ids is
            # empty -- confirm callers always pass at least one id.
            generated_filename = mail_template.mako_template_env\
                .from_string(report.download_filename)\
                .render({
                    'objects': objects,
                    'o': objects[0],
                    'object': objects[0],
                })
            result.headers['Content-Disposition'] = main.content_disposition(
                generated_filename)
        return result
The Odoo community provides a default module for custom report names. You can install this module directly and set the report name like: ${o.name}
Here o means your record.
Below is a link of odoo community module.
https://www.odoo.com/apps/modules/9.0/report_custom_filename/
This may help you.
Try the attachment attribute in your report to change the report file name to your desired name.
<!-- Delivery Slip report action.  The `attachment` attribute is evaluated
     as a Python expression to build the stored file name.
     BUG FIX: the original had a stray ")" at the end of the expression,
     which would make it fail to evaluate. -->
<report
    string="Delivery Slip"
    id="action_report_delivery"
    model="stock.picking"
    report_type="qweb-pdf"
    name="stock.report_deliveryslip"
    file="stock.report_deliveryslip"
    attachment="'Custom Text...'+'.pdf'"
/>
Here is my code, which works for a specific report name within the same model, so the filename only changes for that particular report.
>
from openerp.addons.web.http import Controller, route, request
from openerp.addons.web.controllers.main import _serialize_exception
from openerp.osv import osv
from openerp.addons.report.controllers.main import ReportController
from openerp import http
import simplejson
import logging
class SponsorReportController(ReportController):
    """Rename the downloaded delivery-note PDF of stock.picking after the
    picking's source document (origin); other reports keep their default
    file name."""

    @route(['/report/download'], type='http', auth="user")  # BUG FIX: "@" was shown as "#" in the post
    def report_download(self, data, token):
        requestcontent = simplejson.loads(data)
        url, report_type = requestcontent[0], requestcontent[1]  # renamed: don't shadow builtin "type"
        logging.info(url)
        logging.info(report_type)
        response = ReportController().report_download(data, token)
        # Only PDF downloads carry the report name in the URL path.
        if len(url.split('/report/pdf/')) > 1 and report_type == 'qweb-pdf':
            reportname = url.split('/report/pdf/')[1].split('?')[0]
            reportname, docids = reportname.split('/')
            logging.info(reportname)
            assert docids
            logging.info(docids)
            # Rename only this specific report.
            if reportname == 'pci_stock_picking_report.report_delivery_note':
                picking_model = http.request.env['stock.picking']
                picking = picking_model.browse(int(docids))  # renamed: don't shadow builtin "object"
                filename = "No." + (picking.origin)
                response.headers.set('Content-Disposition', 'attachment; filename=%s.pdf;' % filename)
        return response

405 Method not Allowed- Get request in Google App Engine, language Python

I know there are a ton of these questions and I have spent hours going through them and trying to figure it out but I am not able to find the problem. My main.py looks like this:
import webapp2
from google.appengine.api import oauth
# WSGI application with one route in the constructor; further routes are
# registered on the router afterwards.  Handlers are given as
# "module.Class" strings and resolved lazily at dispatch time.
app = webapp2.WSGIApplication([
('/org', 'org.Organization'),
], debug=True)
app.router.add(webapp2.Route(r'/org/<id:[0-9]+><:/?>', 'org.Organization'))
app.router.add(webapp2.Route(r'/org/search', 'org.OrgSearch'))
app.router.add(webapp2.Route(r'/resources', 'resources.Resource'))
app.router.add(webapp2.Route(r'/resources/<rid:[0-9]+>/org/<oid:[0-9]+><:/?>', 'resources.ResourceOrgs'))
And the code for my get and post request looks like this:
import webapp2
from google.appengine.ext import ndb
import dbase
import json
class Organization(webapp2.RequestHandler):
    """JSON-only REST handler for Organization entities."""

    def post(self):
        """Create an organization; name, city and state are required."""
        if 'application/json' not in self.request.accept:
            self.response.status = 406
            self.response.status_message = "API only supports application/json MIME type"
            return
        new_org = dbase.Organization()
        name = self.request.get('name', default_value=None)
        phone = self.request.get('phone', default_value=None)
        street = self.request.get('street', default_value=None)
        city = self.request.get('city', default_value=None)
        state = self.request.get('state', default_value=None)
        # BUG FIX: the original set a 400 status for each missing required
        # field but fell through and saved the entity anyway.  Return
        # immediately on validation failure.
        if not name:
            self.response.status = 400
            self.response.status_message = "Invalid request, Name is required"
            return
        new_org.name = name
        if phone:
            new_org.phone = phone
        if street:
            new_org.street = street
        if not city:
            self.response.status = 400
            self.response.status_message = "Invalid request, City is required"
            return
        new_org.city = city
        if not state:
            self.response.status = 400
            self.response.status_message = "Invalid request, State is required"
            return
        new_org.state = state
        new_org.put()
        self.response.write(json.dumps(new_org.to_dict()))

    def get(self, **kwargs):
        """Return one organization by id, or the list of all keys."""
        if 'application/json' not in self.request.accept:
            self.response.status = 406
            self.response.status_message = "API only supports application/json MIME type"
            return  # BUG FIX: the original fell through and served the body anyway
        if 'id' in kwargs:
            out = ndb.Key(dbase.Organization, int(kwargs['id'])).get().to_dict()
            self.response.write(json.dumps(out))
        else:
            q = dbase.Organization.query()
            keys = q.fetch(keys_only=True)
            results = {'keys': [x.id() for x in keys]}
            self.response.write(json.dumps(results))
I hope somebody can help me because I cannot figure it out and I am running out of time. I am using notepad++ but I changed it so it is using spaces instead of tabs.
Try indenting your def get() and def post()

memcache doesn't work as expected

We have a small application for Google App Engine in Python, and we are using memcache. But memcache keys remain even after memcache.delete, and also memcache returns a number (0) when I expect it to return a string ("undefined"). Here is my code:
check_feature.py:
import sys
sys.path.insert(0, 'libs')
import webapp2
import json
from google.appengine.api import memcache
from models.shard_counter import GeneralCounterShard
from models.check_feature_limit import CheckFeatureLimit
class CheckFeatureHandler(webapp2.RequestHandler):
def get_number_of_users_enabled(self, feature_name):
"""
Get the number of users enabled for the given feature name.

Returns None when no limit is configured.  The datastore value is
cached in memcache for an hour; because a cache miss also reads as
None, the string "undefined" is stored as a sentinel for "no limit
configured" and translated back to None before returning.
"""
number_of_users_enabled = "undefined"
number_of_users_enabled_undefined = "undefined"
number_of_users_enabled = memcache.get(key=feature_name)
if (number_of_users_enabled is None):
# Cache miss: read the most recent limit from the datastore.
check_feature_limit = None
check_feature_limits = CheckFeatureLimit.gql("WHERE feature_name=:1 ORDER BY last_modified DESC LIMIT 1", feature_name)
if (check_feature_limits.count() > 0):
check_feature_limit = check_feature_limits.get()
if (check_feature_limit):
number_of_users_enabled = check_feature_limit.number_of_users_enabled
if (number_of_users_enabled is None):
# No limit configured: cache the sentinel instead of None.
number_of_users_enabled = number_of_users_enabled_undefined
# NOTE(review): memcache.add is a no-op when the key already exists.
# Per the post, GeneralCounterShard wrote the SAME key, so stale
# counter values were read back; the fix was a distinct key name.
memcache.add(key=feature_name, value=number_of_users_enabled, time=3600)
# Translate the sentinel back to None for callers.
if (number_of_users_enabled == number_of_users_enabled_undefined):
number_of_users_enabled = None
return number_of_users_enabled
admin.py:
import sys
sys.path.insert(0, 'libs')
import webapp2
import json
import requests
from google.appengine.ext.webapp import template
from google.appengine.api import memcache
from models.shard_counter import GeneralCounterShard
from models.check_feature_limit import CheckFeatureLimit
template.register_template_library("tags.tags")
class AdminHandler(webapp2.RequestHandler):
# Admin page: shows per-feature enable limits and shard counters, and
# updates limits from submitted form values.
def get(self):
# GET renders the same page as POST (no form values -> no updates).
self.post()
def post(self):
params = {}
number_of_users_enabled_dict = {}
number_of_users_dict = {}
# Feature flags are served from S3 as JSON (URL redacted in the post).
r = requests.get(url="http://jsons.[part_of_link_suppressed].com.s3.amazonaws.com/flags.json")
flags = json.loads(r.text)
if ((flags) and ("depending_on_counter" in flags) and (len(flags["depending_on_counter"]) > 0)):
for feature_name in flags["depending_on_counter"]:
# Latest configured limit for this feature, if any.
check_feature_limit = None
check_feature_limits = CheckFeatureLimit.gql("WHERE feature_name=:1 ORDER BY last_modified DESC LIMIT 1", feature_name)
if (check_feature_limits.count() > 0):
check_feature_limit = check_feature_limits.get()
# Form value (present only when the admin submitted the form).
number_of_users_enabled = self.request.get(feature_name + "_number_of_users_enabled")
if (number_of_users_enabled):
number_of_users_enabled = int(number_of_users_enabled)
if (not(check_feature_limit)):
check_feature_limit = CheckFeatureLimit(feature_name=feature_name)
check_feature_limit.number_of_users_enabled = number_of_users_enabled
check_feature_limit.put()
# NOTE(review): per the post this delete appeared ineffective --
# GeneralCounterShard wrote the same memcache key; resolved by
# renaming the key used by the feature-limit cache.
memcache.delete(key=feature_name) # I don't think it works.
# Re-read the (possibly just-updated) limit for display.
number_of_users_enabled = None
if (check_feature_limit):
number_of_users_enabled = check_feature_limit.number_of_users_enabled
if (not(number_of_users_enabled is None)):
number_of_users_enabled_dict[feature_name] = number_of_users_enabled
number_of_users = GeneralCounterShard.get_count(feature_name)
number_of_users_dict[feature_name] = number_of_users
params["depending_on_counter"] = flags["depending_on_counter"]
params["number_of_users_enabled_dict"] = number_of_users_enabled_dict
params["number_of_users_dict"] = number_of_users_dict
html = template.render("admin/admin.html", params)
self.response.out.write(html)
# WSGI entry point: route the admin page to the handler above.
app = webapp2.WSGIApplication([
("/admin", AdminHandler)
], debug=True)
The values of test_counter_feature_1 (Number of users to enable) is 2, test_counter_feature_2 is 4 and test_counter_feature_3 is undefined (there is no object), but in the memcache the values are 2, 3 and 0 respectively, even after I save the form (and therefore the memcache should be deleted). What is the problem? I expect the value of test_counter_feature_3 to be "undefined", not 0. And the two other values should be deleted after saving the form.
OK, I found the problem. GeneralCounterShard also saved the same key to memcache, so I renamed the key and everything works now. The new key is feature_name + "_number_of_users_enabled", instead of feature_name like it was before. So I replaced all the calls to memcache with the new key, and now it works. Thank you!

Categories