Markers not showing on Google Map in Google App Engine - python

I have created a Google App Engine project in Python. It runs on my localhost, but when I upload it to geo-event-maps.appspot.com the markers are not displayed.
I have a cron job which runs to call /place.
I have no log errors.
My datastore is empty!
The txt files are being uploaded with:
file_path = os.path.dirname(__file__)
path = os.path.join(file_path, 'storing', 'txtFiles')
Is there a way of checking that the files have been uploaded? I am at an absolute loss. Has anyone had these problems before?
Below is my main.py:
#!/usr/bin/env python
'''
Created on Mar 30, 2011
@author: kimmasterson
'''
from google.appengine.ext import webapp
from google.appengine.ext import db
from placemaker import placemaker
import logging
import wsgiref.handlers
import os, glob
from google.appengine.dist import use_library
use_library('django', '1.2')
from google.appengine.ext.webapp import template

class Story(db.Model):
    id = db.StringProperty()
    loc_name = db.StringProperty()
    title = db.StringProperty()
    long = db.FloatProperty()
    lat = db.FloatProperty()
    link = db.StringProperty()
    date = db.StringProperty()

class MyStories(webapp.RequestHandler):
    def get(self):
        temp = db.Query(Story)
        temp = temp.count()
        story_set = Story.all()
        template_values = {
            'storyTemp': story_set
        }
        path = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(path, template_values))

class place(webapp.RequestHandler):
    def get(self):
        #path = '/storing/txtFiles'
        file_path = os.path.dirname(__file__)
        path = os.path.join(file_path, 'storing', 'txtFiles')
        try:
            for infile in glob.glob(os.path.join(path, '*.txt')):
                #print infile
                f = open(infile, 'r')
                data = f.read()
                newfile = infile.replace('.txt', '')
                newfile = newfile.replace('/storing/txtFiles/', '')
                #print newfile
                storyname = 'http://www.independent.ie/national-news/' + newfile
                #print storyname
                #logging.info(data)
                p = placemaker('HSnG9pPV34EUBcexz.tDYuSrZ8Hnp.LowswI7TxreF8sXrdpVyVIKB4uPGXBYOA9VjjF1Ca42ipd_KhdJsKYjI5cXRo0eJM-')
                print p.find_places(data)
                for place in p.places:
                    splitted = place.name.split()
                    for word in splitted:
                        temp = db.Query(Story)
                        temp = temp.filter("link = ", storyname)
                        results = temp.fetch(limit=1)
                        if len(results) > 0:
                            break
                        elif 'IE' in word:
                            print temp
                            print 'success'
                            print 'name of the file is:' + newfile
                            story = Story(name=newfile, long=place.centroid.longitude, lat=place.centroid.latitude, link=storyname, loc_name=place.name, title=newfile).put()
                            #logging.info(type(place.centroid.latitude))
        except:
            print 'error'

def main():
    application = webapp.WSGIApplication([('/', MyStories), ('/place', place)],
                                         debug=True)
    wsgiref.handlers.CGIHandler().run(application)

if __name__ == '__main__':
    main()
Here is my cron.yaml:
cron:
- description: running place
  url: /place
  schedule: every day 11:05
app.yaml is as follows:
application: geo-event-maps
version: 2
runtime: python
api_version: 1

handlers:
- url: .*
  script: main.py

builtins:
- datastore_admin: on

You need to be sure your files are being uploaded with your application code; they cannot be marked as static files or they won't be accessible to your code. Run appcfg.py with the --verbose flag and make sure they get uploaded.
Second issue: in your place class you define the path as path = '/storing/txtFiles'. That is wrong. Your path will probably be something more like:
file_path = os.path.dirname(__file__)
path = os.path.join(file_path, 'storing', 'txtFiles')
Also, I suggest you don't use print; instead use self.response.out.write(stuff_to_write).
You might also want to look at using key_names. You'll then be able to make your code quite a bit more efficient by running a batch db.get instead of a db.Query inside a nested for-loop. Use Appstats and try to minimize the number of RPCs.
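For example, here is a minimal sketch of that approach (the story_key_name scheme is made up for illustration; the property names follow the question's model):
def story_key_name(link):
    # Derive a stable key_name from the story link.
    return 'story:' + link

# One batch RPC instead of one query per word:
existing = Story.get_by_key_name([story_key_name(storyname)])
if existing[0] is None:
    Story(key_name=story_key_name(storyname),
          title=newfile,
          link=storyname,
          lat=place.centroid.latitude,
          long=place.centroid.longitude,
          loc_name=place.name).put()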

First, make sure that you are accessing your files using a relative path.
Next, ensure you have not marked the files as static within your app.yaml, as static files are not uploaded to the same place as your application (they are sent somewhere the Google frontend servers can serve them from more directly).
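If you want a quick way to verify what was actually deployed, a throwaway handler along these lines can list the files the app can see (a sketch using the same webapp API as the question; the handler name and route are made up):
class CheckFiles(webapp.RequestHandler):
    def get(self):
        path = os.path.join(os.path.dirname(__file__), 'storing', 'txtFiles')
        self.response.headers['Content-Type'] = 'text/plain'
        # List every txt file the deployed app can actually read.
        for name in glob.glob(os.path.join(path, '*.txt')):
            self.response.out.write(name + '\n')

Map it in the WSGIApplication alongside the other routes (e.g. ('/checkfiles', CheckFiles)) and visit it after deploying.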

Related

How to implement a watchdog-like script in native Python?

I have a tool which generates some reports as HTML files. Since there are many of them and organizing them manually takes a lot of time, I tried to write a script that organizes the files automatically according to some rules I have defined.
import os
import re
from endwith import EndsWith

filefullname = EndsWith('.html')
allfiles = filefullname.findfile()
report_path = "/home/user/reports/"
while True:
    files = os.listdir("/home/user/")
    if not allfiles:
        continue
    else:
        header = re.match(r"^[^_]+(?=_)", allfiles[0])
        if not os.path.exists(report_path + str(header.group())):
            os.system(f"mkdir {report_path + str(header.group())}")
            os.system(f"mv /home/user/*.html reports/{str(header.group())}")
        else:
            os.system(f"mv /home/user/*.html reports/{str(header.group())}")
This is the main file which does the automation. The class is a custom EndsWith class, because the native str.endswith only returns a boolean. The thing is that it runs, but it requires a restart to finish the job.
Any suggestions?
P.S. This is the class code:
import os

class EndsWith:
    def __init__(self, extension):
        self.extension = extension

    def findfile(self):
        files = os.listdir("/home/user/")
        file_list = []
        for file in files:
            #print(file)
            if self.extension in file:
                file_list.append(file)
        return file_list
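One likely cause: allfiles is computed once, before the while loop, so files created later are never seen until the script restarts. A minimal sketch that re-scans the directory on every pass (the paths and the one-second sleep are illustrative):
import os
import re
import time

watch_dir = "/home/user/"
report_path = "/home/user/reports/"

while True:
    # Re-scan on every iteration so newly generated reports are picked up.
    html_files = [f for f in os.listdir(watch_dir) if f.endswith('.html')]
    if html_files:
        header = re.match(r"^[^_]+(?=_)", html_files[0])
        if header:
            target = os.path.join(report_path, header.group())
            os.makedirs(target, exist_ok=True)  # replaces the mkdir/else branches
            os.system(f"mv {watch_dir}*.html {target}")
    time.sleep(1)  # avoid a busy loop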

How to delete a file in a certain folder in Mega using python?

I'm trying to delete a file in a certain folder. I tried to use this code:
file_exists = os.path.exists(line1[0] + '.xlsx')
if file_exists:
    find_file = m.find(line1[0] + ".xlsx")
    if find_file:
        delete_file = m.delete(find_file[0])
The problem is there are multiple files with the same name in different folders. Each folder's name is the date the folder was created, and the file name is a course code, e.g. BH2952. When I use the above code, all the files named BH2952 are deleted, but I only want to delete the file in today's date folder. Does anyone know how to do this?
And here is the full code that I've done:
import os
import os.path
import sys
from pathlib import Path
from os.path import exists
from datetime import datetime
from mega import Mega

today = datetime.now()

mega = Mega()

# Login to MEGA
m = mega.login('nurul.syamsina1202@gmail.com', 'Syamsina990212')

# Get user details
details = m.get_user()

# Get account disk quota
quota = m.get_quota()

# Get account storage space
''' specify unit output kilo, mega, gig, else bytes will output '''
space = m.get_storage_space(kilo=True)

# Get accounts file
files = m.get_files()

# Create a folder on Mega if the file hasn't been created yet
''' Excludes results which are in the Trash folder (i.e. deleted) '''
folder = m.find("Fourth_Year_Students", exclude_deleted=True)
if not folder:
    m.create_folder("Fourth_Year_Students")
subfolder = m.find("Fourth_Year_Students/" + today.strftime('%d%m%Y'), exclude_deleted=True)
if not subfolder:
    m.create_folder("Fourth_Year_Students/" + today.strftime('%d%m%Y'))

# Change directory to today's date folder
os.chdir(r"C:/OpenVino/excel_report/Fourth_Year_Students/" + today.strftime('%d%m%Y'))
os.getcwd()

# Read class schedule file
file1 = open(r"C:/OpenVino/excel_report/class_codes_and_names.txt", "r")
lines_1 = file1.readlines()
for line1 in lines_1:
    line1 = line1.strip('\n')
    line1 = line1.split(",")
    #os.chdir(directory_name)
    file_exists = os.path.exists(line1[0] + '.xlsx')
    if file_exists:
        find_file = m.find(line1[0] + ".xlsx")
        if find_file:
            delete_file = m.delete(find_file[0])
        # Upload a file and get its public link
        folder = m.find("Fourth_Year_Students/" + today.strftime('%d%m%Y'))
        file = m.upload(line1[0] + '.xlsx', folder[0])
        link = m.get_upload_link(file)
        print('\nFile', line1[0], 'is ready. To view the file, please click on the link below:\n', link)
        # see mega.py for destination and filename options
    else:
        continue
m.empty_trash()
print("All files have been successfully uploaded in the cloud.")

For loop not working as expected inside Python Flask

Hi, I have a small Python script which untars a list of files present in a folder. Below is the script.
app = Flask(__name__)

@app.route('/untarJson')
def untarJson():
    outdir = "C:\\Users\\esrilka\\Documents\\Tar Files\\Untar"
    inputfilefolder = "C:\\Users\\esrilka\\Documents\\Tar Files\\New tar files\\"
    jsonfiles = []
    for filenames in os.listdir(inputfilefolder):
        if filenames.endswith(".tar.gz"):
            head, tail = os.path.split(filenames)
            basename = os.path.splitext(os.path.splitext(tail)[0])[0]
            t = tarfile.open(os.path.join(inputfilefolder, filenames), 'r')
            for member in t.getmembers():
                if "autodiscovery/report.json" in member.name:
                    with open(os.path.join(outdir, basename + '.json'), 'wb') as f:
                        f.write(t.extractfile('autodiscovery/report.json').read())

if __name__ == '__main__':
    app.run(debug=True)
It works fine without Flask: the folder contains four tar files and all 4 are untarred.
But when I use Flask, only one file is untarred and only one file name is displayed.
How can I untar all the files inside the folder and also return the names of the files (i.e. only the short names, not the full paths)?
See if the code below works for you. I have changed your original code only a little, and it works without any issues: all the available tar.gz files are untarred and the file names get displayed after the request completes.
from flask import Flask, jsonify
import tarfile
import os

app = Flask(__name__)

@app.route('/untarJson')
def untarJson():
    outdir = "C:\\tests\\untared"
    inputfilefolder = "C:\\tests"
    jsonfiles = []
    for filenames in os.listdir(inputfilefolder):
        if filenames.endswith(".tar.gz"):
            head, tail = os.path.split(filenames)
            basename = os.path.splitext(os.path.splitext(tail)[0])[0]
            t = tarfile.open(os.path.join(inputfilefolder, filenames), 'r')
            for member in t.getmembers():
                if "autodiscovery/report.json" in member.name:
                    with open(os.path.join(outdir, basename + '.json'), 'wb') as f:
                        f.write(t.extractfile('autodiscovery/report.json').read())
                    jsonfiles.append(os.path.join(outdir, basename + '.json'))
    return jsonify(jsonfiles), 200

if __name__ == '__main__':
    app.run(debug=True)
After the request completes, something like the below will be returned (the output will differ in your case):
[
  "C:\tests\untared\autodiscovery1.json",
  "C:\tests\untared\autodiscovery2.json",
  "C:\tests\untared\autodiscovery3.json"
]
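With the development server running, you can hit the endpoint from another process to see that list (a sketch assuming Flask's default host and port):
import urllib.request

# Fetch the JSON list of extracted files from the running dev server.
print(urllib.request.urlopen('http://127.0.0.1:5000/untarJson').read().decode())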

Reload Flask app when template file changes under Linux

I am developing a Flask application under Linux, and I'm struggling whenever I make changes to template files.
I have configured my app to reload on template changes using
TEMPLATES_AUTO_RELOAD = True
PS: when I develop under Windows, templates reload normally.
EDIT
I am using the built-in server, and I run my app like this:
app = create_app()
manager = Manager(app)

@manager.command
def run():
    """Run in local machine."""
    app.run(threaded=True)
Here is my configuration class
class DefaultConfig(object):
    # Project name
    PROJECT = "***"

    # Turns on debugging features in Flask
    DEBUG = True

    # Secret key
    SECRET_KEY = "**************"

    # Configuration for the Flask-Bcrypt extension
    BCRYPT_LEVEL = 12

    # Application root directory
    APP_ROOT = os.path.dirname(os.path.abspath(__file__))

    # Application email
    MAIL_FROM_EMAIL = "**********"

    # Upload directory
    UPLOAD_DIR = "static/uploads/"

    # Avatar upload directory
    UPLOAD_AVATAR_DIR = os.path.join(UPLOAD_DIR, 'avatars/')
    ALLOWED_AVATAR_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])

    # Instance folder path
    INSTANCE_FOLDER_PATH = os.path.join('/home/karim/OpenXC/Dashboard/Flask', 'instance')

    # Cache configuration
    CACHE_TYPE = 'null'
    CACHE_DEFAULT_TIMEOUT = 60
    TEMPLATES_AUTO_RELOAD = True

    # ToolbarExtension configuration
    DEBUG_TB_ENABLED = False
    DEBUG_TB_INTERCEPT_REDIRECTS = False
    DEBUG_TB_TEMPLATE_EDITOR_ENABLED = True
    DEBUG_TB_PROFILER_ENABLED = True
As for caching, I am using the cache extension but it's disabled; please check the config class above.
Thanks,
I managed to fix my issue by adding my template folder to the extra_files parameter of app.run.
Here is how:
extra_dirs = [
    '/home/karim/flak_app/templates',
]
extra_files = extra_dirs[:]
for extra_dir in extra_dirs:
    for dirname, dirs, files in os.walk(extra_dir):
        for filename in files:
            filename = os.path.join(dirname, filename)
            if os.path.isfile(filename):
                extra_files.append(filename)
app.run(threaded=True, extra_files=extra_files)
Hope this will help someone someday :)
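As an aside, the same file list can be collected more compactly with pathlib (a sketch equivalent to the os.walk loop above; the directory path is the one from the answer):
from pathlib import Path

# Recursively collect every file under the templates directory.
extra_files = [str(p) for p in Path('/home/karim/flak_app/templates').rglob('*') if p.is_file()]
app.run(threaded=True, extra_files=extra_files)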

Django dynamic file upload path

Here is my Django code.
I want to upload my file to a specific location, where the path is created dynamically.
def get_upload_file(instance, filename):
    today_date = datetime.datetime.today().date()
    directory = 'Data/' + str(today_date)
    if not os.path.exists(directory):
        os.makedirs(directory)
    full_path = str(directory) + "/%s" % (filename)
    print "full_path --> ", full_path
    # user = updated_path()
    # print user
    return full_path

class UploadFile(models.Model):
    path = models.FileField(upload_to=get_upload_file)
I am using the above code to upload the file, but I want another directory inside it, named after the username.
Expected output:
Data/2015-08-16/username/
I then want to upload the file into the username directory. Any solutions? Please help me.
Finally I got a solution for the above problem. When creating the model instance, I set the request object on that instance and access it in the get_upload_file function:
class UploadFile(models.Model):
    path = models.FileField(upload_to=get_upload_file)
    reqObj = None

    def set_reqObj(self, request):
        self.reqObj = request

new_file = UploadFile(path=afile)
new_file.set_reqObj(request)
Then use reqObj in the get_upload_file function:
instance.reqObj.user.username
The updated get_upload_file function is:
def get_upload_file(instance, filename):
    today_date = datetime.datetime.today().date()
    directory = 'Data/' + str(today_date) + '/' + instance.reqObj.user.username
    if not os.path.exists(directory):
        os.makedirs(directory)
    full_path = str(directory) + "/%s" % (filename)
    return full_path
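A more conventional alternative (a sketch, not the answer's approach; the owner field and names here are illustrative) is to store the user on the model itself and read it in upload_to, letting Django's storage create any missing directories:
import datetime
import os
from django.conf import settings
from django.db import models

def get_upload_file(instance, filename):
    today = datetime.datetime.today().date()
    # Return a path relative to MEDIA_ROOT; Django creates missing directories.
    return os.path.join('Data', str(today), instance.owner.username, filename)

class UploadFile(models.Model):
    owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    path = models.FileField(upload_to=get_upload_file)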
