While starting a map job I am getting this error.
ERROR 2015-05-11 06:03:45,719 webapp2.py:1528] __init__() got an unexpected keyword argument '_user_agent'
Traceback (most recent call last):
File "/home/rshah/google_appengine/lib/webapp2-2.3/webapp2.py", line 1511, in __call__
rv = self.handle_exception(request, response, e)
File "/home/rshah/google_appengine/lib/webapp2-2.3/webapp2.py", line 1505, in __call__
rv = self.router.dispatch(request, response)
File "/home/rshah/google_appengine/lib/webapp2-2.3/webapp2.py", line 1253, in default_dispatcher
return route.handler_adapter(request, response)
File "/home/rshah/google_appengine/lib/webapp2-2.3/webapp2.py", line 1076, in __call__
handler = self.handler(request, response)
File "/home/rshah/work/python/web/fourtop/mapreduce/base_handler.py", line 85, in __init__
_user_agent=self._DEFAULT_USER_AGENT))
File "/home/rshah/google_appengine/google/appengine/datastore/datastore_rpc.py", line 105, in positional_wrapper
return wrapped(*args, **kwds)
TypeError: __init__() got an unexpected keyword argument '_user_agent'
Here is how I am starting the map function.
control.start_map(name='Export Device Health Logs',
handler_spec='data_process.process_health_logs',
reader_spec='mapreduce.input_readers.DatastoreInputReader',
mapper_parameters={"input_reader":{'entity_kind': 'models.DeviceHealth','email':user.email,'to_date':to_date,'from_date':from_date},'output_writer': {'bucket_name': bucket_name,
'content_type': 'text/plain'}},
shard_count=1,
output_writer_spec='mapreduce.output_writers.GoogleCloudStorageConsistentOutputWriter',
mapreduce_parameters={'done_callback':'/tasks/mapreduce/done/export_health_data',
'done_callback_queue':'mapreduce-done'})
*Handler Code :-
This code reads each row of device health model and prepares a CSV file for the same.
After it has done iterating over the rows of data model we prepare CSV file and email it to user
def process_health_logs(health_logs):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
to_date = params.get('to_date')
from_date = params.get('from_date')
to_date = datetime.datetime.strptime(str(to_date),'%Y-%m-%d').date()
from_date = datetime.datetime.strptime(str(from_date),'%Y-%m-%d').date()
error_date = health_logs.error_date if health_logs.error_date else health_logs.added_date
date = error_date.date()
if date >= from_date and date <= to_date:
added_date = time_util.utc_to_local(error_date, health_logs.place.timezone)
device_id = health_logs.device_id
user = health_logs.user.email
place = health_logs.place.name
unique_device_id = health_logs.unique_device_id
error_type = health_logs.error_type
error_level = health_logs.error_level
error_description = health_logs.error_description
row_data=[
added_date,
device_id,
user,
place,
unique_device_id,
error_type,
error_level,
error_description
]
row_data = [gtools.str_or_empty(x).encode('utf-8').replace(',', ';') for x in row_data]
yield ','.join(row_data) + '\n'
Most likely your are using an old cloudstorage module, not compatible with the mapreduce module.
Related
I have a function:
def update_coins_table():
# Check if the currency has been updated in the last hour
up_to_date_currency = Currency.objects.filter(
currency_value_in_dollars_date=
[datetime.now(), timedelta(hours=1)]).order_by('-currency_value_in_dollars_date')[:len(coins_ids)]
if up_to_date_currency.exists():
# Return if it is
return
if not do_greeting():
print("Gecko crypto board not reachable. Db setup")
return
crypto_coins_prices = cg.get_price(ids=coins_ids_str, vs_currencies='usd')
datetime_now = datetime.now()
for coin_key in crypto_coins_prices:
coin = Currency(
currency_name=coin_key,
currency_value_in_dollars=crypto_coins_prices[coin_key]['usd'],
currency_value_in_dollars_date=datetime_now)
coin.save()
and get the following error on executing filter(),
up_to_date_currency = Currency.objects.filter(
currency_value_in_dollars_date=
[datetime.now(), timedelta(hours=1)]).order_by('-currency_value_in_dollars_date')[:len(coins_ids)]
Error message:
Internal Server Error: /get_currency/
Traceback (most recent call last):
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\core\handlers\exception.py", line 47, in inner
response = get_response(request)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\core\handlers\base.py", line 179, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "C:\projects\crypto-currency-board\crypto\manage_crypto_currency\views.py", line 21, in get_latest_currency
update_coins_table()
File "C:\projects\crypto-currency-board\crypto\manage_crypto_currency\get_coins_scheduler.py", line 38, in update_coins_table
up_to_date_currency = Currency.objects.filter(
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\manager.py", line 85, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\query.py", line 942, in filter
return self._filter_or_exclude(False, *args, **kwargs)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\query.py", line 962, in _filter_or_exclude
clone._filter_or_exclude_inplace(negate, *args, **kwargs)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\query.py", line 969, in _filter_or_exclude_inplace
self._query.add_q(Q(*args, **kwargs))
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\sql\query.py", line 1358, in add_q
clause, _ = self._add_q(q_object, self.used_aliases)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\sql\query.py", line 1377, in _add_q
child_clause, needed_inner = self.build_filter(
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\sql\query.py", line 1319, in build_filter
condition = self.build_lookup(lookups, col, value)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\sql\query.py", line 1165, in build_lookup
lookup = lookup_class(lhs, rhs)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\lookups.py", line 24, in __init__
self.rhs = self.get_prep_lookup()
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\lookups.py", line 76, in get_prep_lookup
return self.lhs.output_field.get_prep_value(self.rhs)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\fields\__init__.py", line 1357, in get_prep_value
value = super().get_prep_value(value)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\fields\__init__.py", line 1217, in get_prep_value
return self.to_python(value)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\db\models\fields\__init__.py", line 1318, in to_python
parsed = parse_datetime(value)
File "C:\projects\crypto-currency-board\venv\lib\site-packages\django\utils\dateparse.py", line 107, in parse_datetime
match = datetime_re.match(value)
TypeError: expected string or bytes-like object
[22/Nov/2020 20:27:44] "GET /get_currency/ HTTP/1.1" 500 142548
The model of 'Currency' is:
class Currency(models.Model):
currency_name = models.CharField(max_length=100)
currency_value_in_dollars = models.FloatField()
currency_value_in_dollars_date = models.DateTimeField()
def __str__(self):
return self.currency_name
Why can't I filter by 'currency_value_in_dollars_date'? check if the currency value has been updated within the last hour.
You can not simply uses a list of two values to filter. If you for example want to retrieve all elements in between you can work with a __range lookup [Django-doc]:
from django.utils.timezone import now
current_time = now()
up_to_date_currency = Currency.objects.filter(
currency_value_in_dollars_date__range=(
current_time-timedelta(hours=1),
current_time
)
).order_by('-currency_value_in_dollars_date')[:len(coins_ids)]
This will thus retrieve all Currency objects between an hour ago and now.
If by
check if the currency value has been updated within the last hour
you mean you want to filter all rows up to 1 hour ago, then replace
currency_value_in_dollars_date=
[datetime.now(), timedelta(hours=1)]
with (notice __gte)
currency_value_in_dollars_date__gte=datetime.now() - timedelta(hours=1)
If you want to filter by array, then you'd have to either use JSONField to match value, or use __in to execute SQL in. Otherwise, you can't filter by array, so what you did is basically invalid.
Depending on your settings, you should probably use timezone.now() from django instead of datetime.
Hi i'm getting this error. TypeError: Object of type ColumnClause is not JSON serializable.
Whole thing:
[2020-10-26 22:17:58,448] ERROR in app: Exception on /all-user [GET]
Traceback (most recent call last):
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\_compat.py", line 39, in reraise
raise value
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "C:\Users\ryand\Desktop\mealplansfree\main-api\api.py", line 36, in decorated
return f(*args, **kwargs)
File "C:\Users\ryand\Desktop\mealplansfree\main-api\api.py", line 59, in get_all_users
return jsonify({'users' : output})
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\json\__init__.py", line 370, in jsonify
dumps(data, indent=indent, separators=separators) + "\n",
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\json\__init__.py", line 211, in dumps
rv = _json.dumps(obj, **kwargs)
File "c:\users\ryand\appdata\local\programs\python\python38-32\lib\json\__init__.py", line 234, in dumps
return cls(
File "c:\users\ryand\appdata\local\programs\python\python38-32\lib\json\encoder.py", line 199, in encode
chunks = self.iterencode(o, _one_shot=True)
File "c:\users\ryand\appdata\local\programs\python\python38-32\lib\json\encoder.py", line 257, in iterencode
return _iterencode(o, 0)
File "c:\users\ryand\.virtualenvs\main-api-ucgvpon1\lib\site-packages\flask\json\__init__.py", line 100, in default
return _json.JSONEncoder.default(self, o)
File "c:\users\ryand\appdata\local\programs\python\python38-32\lib\json\encoder.py", line 179, in default
raise TypeError(f'Object of type {o.__class__.__name__} '
TypeError: Object of type ColumnClause is not JSON serializable
Here is the code for the above.
#app.route('/all-user', methods=['GET'])
#application_required
def get_all_users():
users = User.query.all()
output = []
for user in users:
user_data = {}
user_data['user_id'] = user.user_id
user_data['full_name'] = user.full_name
user_data['username'] = user.username
user_data['password'] = user.password
user_data['admin'] = user.admin
output.append(user_data)
return jsonify({'users' : output})
here is the secret key check
def application_required(f):
#wraps(f)
def decorated(*args, **kwargs):
token = None
if 'x-access-key' in request.headers:
token = request.headers['x-access-key']
if not token:
return jsonify({'message' : 'ERROR x-access-key missing or incorrect.'}), 401
if token == app.config['SECRET_KEY']:
return f(*args, **kwargs)
else:
return jsonify({'message' : 'ERROR x-access-key missing or incorrect.'}), 401
return decorated
If anyone knows whats going on or could guide me through whats going on and how to debug these that would be great!
I found the answer here: SQLAlchemy warning: column won't be part of the declarative mapping
I had missed in my User model in the username it was a lowercase c and not upper case C. This fixed the error and is now working. Thanks!
The cause can be data type. You are using lot of data types. Try to reduce data type changes.
I have a cron job which calls vendor api to fetch the companies list. Once the data is fetched, we are storing that data into cloud datastore as shown in the below code . For some reason for last two days when i trigger the cron job , started seeing the errors. When i debug the code locally i dont see this error
company_list = cron.rest_client.load(config, "companies", '')
if not company_list:
logging.info("Company list is empty")
return "Ok"
for row in company_list:
company_repository.save(row,original_data_source,
actual_data_source)
Repository code
def save( dto, org_ds , act_dp):
try:
key = 'FIN/%s' % (dto['ticker'])
company = CompanyInfo(id=key)
company.stock_code = key
company.ticker = dto['ticker']
company.name = dto['name']
company.original_data_source = org_ds
company.actual_data_provider = act_dp
company.put()
return company
except Exception:
logging.exception("company_repository: error occurred saving the
company record ")
raise
Error
DeadlineExceededError: The overall deadline for responding to the
HTTP request was exceeded.
Exception details
Traceback (most recent call last):
File
"/base/data/home/runtimes/python27_experiment/python27_lib/versions/1/googl
e/appengine/runtime/wsgi.py", line 267, in Handle
result = handler(dict(self._environ), self._StartResponse)
File "/base/data/home/apps/p~svasti-173418/internal-
api:20170808t160537.403249868819304873/lib/flask/app.py", line 1836, in __call__
return self.wsgi_app(environ, start_response)
File "/base/data/home/apps/p~svasti-173418/internal-
api:20170808t160537.403249868819304873/lib/flask/app.py", line 1817, in
wsgi_app
response = self.full_dispatch_request()
File "/base/data/home/apps/p~svasti-173418/internal-
api:20170808t160537.403249868819304873/lib/flask/app.py", line 1475, in full_dispatch_request
rv = self.dispatch_request()
File "/base/data/home/apps/p~svasti-173418/internal-api:20170808t160537.403249868819304873/lib/flask/app.py", line 1461, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/base/data/home/apps/p~svasti-173418/internal-api:20170808t160537.403249868819304873/internal/cron/company_list.py", line 21, in run
company_repository.save(row,original_data_source, actual_data_source)
File "/base/data/home/apps/p~svasti-173418/internal-api:20170808t160537.403249868819304873/internal/repository/company_repository.py", line 13, in save
company.put()
File "/base/data/home/runtimes/python27_experiment/python27_lib/versions/1/google/appengine/ext/ndb/model.py", line 3458, in _put
return self._put_async(**ctx_options).get_result()
File "/base/data/home/runtimes/python27_experiment/python27_lib/versions/1/google/appengine/ext/ndb/tasklets.py", line 383, in get_result
self.check_success()
File "/base/data/home/runtimes/python27_experiment/python27_lib/versions/1/google/appengine/ext/ndb/tasklets.py", line 378, in check_success
self.wait()
File "/base/data/home/runtimes/python27_experiment/python27_lib/versions/1/google/appengine/ext/ndb/tasklets.py", line 362, in wait
if not ev.run1():
File "/base/data/home/runtimes/python27_experiment/python27_lib/versions/1/google/appengine/ext/ndb/eventloop.py", line 268, in run1
delay = self.run0()
File "/base/data/home/runtimes/python27_experiment/python27_lib/versions/1/google/appengine/ext/ndb/eventloop.py", line 248, in run0
_logging_debug('rpc: %s.%s', rpc.service, rpc.method)
File "/base/data/home/runtimes/python27_experiment/python27_lib/versions/1/google/appengine/api/apiproxy_stub_map.py", line 453, in service
#property
DeadlineExceededError: The overall deadline for responding to the HTTP request was exceeded.
Has your company list been getting bigger?
How many entities are you trying to put?
Try saving them as a batch, instead of sequentially in a loop. Remove company.put() from def save( dto, org_ds , act_dp): and use ndb.put_multi() afterwards instead.
company_list = cron.rest_client.load(config, "companies", '')
if not company_list:
logging.info("Company list is empty")
return "Ok"
company_objs=[]
for row in company_list:
company_objs.append(company_repository.save(row,original_data_source,
actual_data_source))
# put 500 at a time
if len(company_objs) > 500:
ndb.put_multi(company_objs)
company_objs=[]
# put any remainders
if len(company_objs) > 0:
ndb.put_multi(company_objs)
My answer is based on one that Alex gave, but runs async.
I've replaced put_multi() with put_multi_async()
By replacing the call to put_multi() with a call to its async equivalent put_multi_async(), the application can do other things right away instead of blocking on put_multi().
And added #ndb.toplevel decorator
This decorator tells the handler not to exit until its asynchronous requests have finished
If your data grows bigger, you may want to look deeper into defered library. It can be used to respawn task every X batches, with the rest of your unprocessed data.
#ndb.toplevel
def fetch_companies_list():
company_list = cron.rest_client.load(config, "companies", '')
if not company_list:
logging.info("Company list is empty")
return "Ok"
company_objs=[]
for row in company_list:
company_objs.append(company_repository.save(row,original_data_source,
actual_data_source))
# put 500 at a time
if len(company_objs) >= 500:
ndb.put_multi_async(company_objs)
company_objs=[]
# put any remainders
if len(company_objs) > 0:
ndb.put_multi_async(company_objs)
I have these two models:
#################
### Usergroup ###
#################
class Usergroup(ndb.Model):
group_name = ndb.StringProperty(indexed = False, required = True)
is_admin_group = ndb.BooleanProperty(indexed = False, required = False, default = False)
############
### User ###
############
class User(ndb.Model):
fb_id = ndb.StringProperty(indexed = True, required = True)
fb_access_token = ndb.TextProperty(indexed = False, required = True)
email = ndb.StringProperty(indexed = True, required = True)
first_name = ndb.StringProperty(indexed = False, required = True)
last_name = ndb.StringProperty(indexed = False, required = True)
gender = ndb.StringProperty(indexed = False)
group_key = ndb.KeyProperty(indexed = False, required = False, kind = Usergroup, default = ndb.Key(Usergroup, 'member'))
join_date = ndb.DateTimeProperty(indexed = True, auto_now_add = True)
last_login = ndb.DateTimeProperty(indexed = True, auto_now_add = True, auto_now = False)
#app.route('/user/login', methods=['POST'])
def user_login():
me = exchange_token_me(request.form['accessToken'])
if me is not False:
user = user_find_or_register(me)
if user is not None:
register_session(user)
return 'success'
return 'error'
def user_find_or_register(user):
qry = User.query(User.fb_id == user['id'])
existing_user = qry.get()
if existing_user is not None:
existing_user.fb_access_token = user['access_token']
existing_user.fb_id = user['id']
existing_user.email = user['email']
existing_user.last_login = datetime.datetime.now()
existing_user.put()
return existing_user
new_user = User()
new_user.fb_id = user['id']
new_user.fb_access_token = user['access_token']
new_user.email = user['email']
new_user.first_name = user['first_name']
new_user.last_name = user['last_name']
new_user.gender = user['gender']
new_user.last_login = datetime.datetime.now()
#new_user.group_key = ndb.key(Usergroup, 'member')
key = new_user.put()
saved_user = key.get()
#key.delete() # DEBUG
if saved_user is not None:
return saved_user
def register_session(user):
session['fb_id'] = user.fb_id
session['first_name'] = user.first_name
session['last_name'] = user.last_name
session['group_key'] = user.group_key
session['loggedin'] = True
The Usergroup model has a small unique string as entity key. There is already a Usergroup whose key is 'member'
Whenever we create/save a user, it should use the key to the 'member' usergroup, but we get this error instead:
TypeError: Key('Usergroup', 'member') is not JSON serializable
Traceback:
ERROR 2016-10-18 14:32:40,572 app.py:1587] Exception on /user/login [POST]
Traceback (most recent call last):
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 1988, in wsgi_app
response = self.full_dispatch_request()
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 1643, in full_dispatch_request
response = self.process_response(response)
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 1864, in process_response
self.save_session(ctx.session, response)
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 926, in save_session
return self.session_interface.save_session(self, session, response)
File "/var/www/mywebsite/public_html/lib/flask/sessions.py", line 359, in save_session
val = self.get_signing_serializer(app).dumps(dict(session))
File "/var/www/mywebsite/public_html/lib/itsdangerous.py", line 565, in dumps
payload = want_bytes(self.dump_payload(obj))
File "/var/www/mywebsite/public_html/lib/itsdangerous.py", line 847, in dump_payload
json = super(URLSafeSerializerMixin, self).dump_payload(obj)
File "/var/www/mywebsite/public_html/lib/itsdangerous.py", line 550, in dump_payload
return want_bytes(self.serializer.dumps(obj))
File "/var/www/mywebsite/public_html/lib/flask/sessions.py", line 85, in dumps
return json.dumps(_tag(value), separators=(',', ':'))
File "/var/www/mywebsite/public_html/lib/flask/json.py", line 126, in dumps
rv = _json.dumps(obj, **kwargs)
File "/usr/lib/python2.7/json/__init__.py", line 251, in dumps
sort_keys=sort_keys, **kw).encode(obj)
File "/usr/lib/python2.7/json/encoder.py", line 209, in encode
chunks = list(chunks)
File "/usr/lib/python2.7/json/encoder.py", line 434, in _iterencode
for chunk in _iterencode_dict(o, _current_indent_level):
File "/usr/lib/python2.7/json/encoder.py", line 408, in _iterencode_dict
for chunk in chunks:
File "/usr/lib/python2.7/json/encoder.py", line 442, in _iterencode
o = _default(o)
File "/var/www/mywebsite/public_html/lib/flask/json.py", line 83, in default
return _json.JSONEncoder.default(self, o)
File "/usr/lib/python2.7/json/encoder.py", line 184, in default
raise TypeError(repr(o) + " is not JSON serializable")
TypeError: Key('Usergroup', 'member') is not JSON serializable
ERROR 2016-10-18 14:32:40,593 main.py:178] An error occurred during a request.
Traceback (most recent call last):
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 1988, in wsgi_app
response = self.full_dispatch_request()
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 1643, in full_dispatch_request
response = self.process_response(response)
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 1864, in process_response
self.save_session(ctx.session, response)
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 926, in save_session
return self.session_interface.save_session(self, session, response)
File "/var/www/mywebsite/public_html/lib/flask/sessions.py", line 359, in save_session
val = self.get_signing_serializer(app).dumps(dict(session))
File "/var/www/mywebsite/public_html/lib/itsdangerous.py", line 565, in dumps
payload = want_bytes(self.dump_payload(obj))
File "/var/www/mywebsite/public_html/lib/itsdangerous.py", line 847, in dump_payload
json = super(URLSafeSerializerMixin, self).dump_payload(obj)
File "/var/www/mywebsite/public_html/lib/itsdangerous.py", line 550, in dump_payload
return want_bytes(self.serializer.dumps(obj))
File "/var/www/mywebsite/public_html/lib/flask/sessions.py", line 85, in dumps
return json.dumps(_tag(value), separators=(',', ':'))
File "/var/www/mywebsite/public_html/lib/flask/json.py", line 126, in dumps
rv = _json.dumps(obj, **kwargs)
File "/usr/lib/python2.7/json/__init__.py", line 251, in dumps
sort_keys=sort_keys, **kw).encode(obj)
File "/usr/lib/python2.7/json/encoder.py", line 209, in encode
chunks = list(chunks)
File "/usr/lib/python2.7/json/encoder.py", line 434, in _iterencode
for chunk in _iterencode_dict(o, _current_indent_level):
File "/usr/lib/python2.7/json/encoder.py", line 408, in _iterencode_dict
for chunk in chunks:
File "/usr/lib/python2.7/json/encoder.py", line 442, in _iterencode
o = _default(o)
File "/var/www/mywebsite/public_html/lib/flask/json.py", line 83, in default
return _json.JSONEncoder.default(self, o)
File "/usr/lib/python2.7/json/encoder.py", line 184, in default
raise TypeError(repr(o) + " is not JSON serializable")
TypeError: Key('Usergroup', 'member') is not JSON serializable
INFO 2016-10-18 14:32:40,610 module.py:788] default: "POST /user/login HTTP/1.1" 500 27
UPDATE:
After Dan has spotted the problem, the solution is in the following function:
def register_session(user):
session['fb_id'] = user.fb_id
session['first_name'] = user.first_name
session['last_name'] = user.last_name
session['group_key'] = user.group_key.id() # Thanks Dan
session['loggedin'] = True
FWIW, a quick test with your code as models.py shows this to be working just fine, at least on the development server:
from models import User
user = User(email='email', username='username')
user.put()
produced:
This worked without even having a Usergroup entity - a key can exist without a matching entity. Of course, trying to follow the link to the Usergroup in the datastore viewer fails:
Traceback (most recent call last):
File "/home/usr_local/google_appengine_1.9.40/lib/webapp2-2.5.1/webapp2.py", line 1536, in __call__
rv = self.handle_exception(request, response, e)
File "/home/usr_local/google_appengine_1.9.40/lib/webapp2-2.5.1/webapp2.py", line 1530, in __call__
rv = self.router.dispatch(request, response)
File "/home/usr_local/google_appengine_1.9.40/lib/webapp2-2.5.1/webapp2.py", line 1278, in default_dispatcher
return route.handler_adapter(request, response)
File "/home/usr_local/google_appengine_1.9.40/lib/webapp2-2.5.1/webapp2.py", line 1102, in __call__
return handler.dispatch()
File "/home/usr_local/google_appengine/google/appengine/tools/devappserver2/admin/admin_request_handler.py", line 96, in dispatch
super(AdminRequestHandler, self).dispatch()
File "/home/usr_local/google_appengine_1.9.40/lib/webapp2-2.5.1/webapp2.py", line 572, in dispatch
return self.handle_exception(e, self.app.debug)
File "/home/usr_local/google_appengine_1.9.40/lib/webapp2-2.5.1/webapp2.py", line 570, in dispatch
return method(*args, **kwargs)
File "/home/usr_local/google_appengine/google/appengine/tools/devappserver2/admin/datastore_viewer.py", line 741, in get
entities = [datastore.Get(entity_key)]
File "/home/usr_local/google_appengine/google/appengine/api/datastore.py", line 671, in Get
return GetAsync(keys, **kwargs).get_result()
File "/home/usr_local/google_appengine/google/appengine/api/apiproxy_stub_map.py", line 613, in get_result
return self.__get_result_hook(self)
File "/home/usr_local/google_appengine/google/appengine/datastore/datastore_rpc.py", line 1717, in __get_hook
entities = extra_hook(entities)
File "/home/usr_local/google_appengine/google/appengine/api/datastore.py", line 640, in local_extra_hook
raise datastore_errors.EntityNotFoundError()
EntityNotFoundError
So you might want to show your actual code creating the entity and the full traceback, something else may be going on.
Your traceback indicates that the problem is not with creating the entity, but with saving your session:
File "/var/www/mywebsite/public_html/lib/flask/app.py", line 926, in save_session
return self.session_interface.save_session(self, session, response)
It appears you included in the session content the key object (possibly by including the entire User entity?), which is what causes the failure. For that purpose keys need to be serialized and you can use key.urlsafe() for that. See this answer for an example: https://stackoverflow.com/a/34835074/4495081
If indeed you included the entire User entity in the session you can just include its urlsafe key instead.
Yup, this is the source of your problem:
session['group_key'] = user.group_key
change it to:
session['group_key'] = user.group_key.urlsafe()
And you'll retrieve it like this:
urlsafe_key = session.get('group_key')
if urlsafe_key:
group_key = ndb.Key(urlsafe=urlsafe_key)
I have an application that has worked successfully in the past. Today however, it's started throwing an error when I try write to datastore. For example, I'm creating a new entity of this model
class EventInstallment(ndb.Model):
somekey = ndb.KeyProperty()
somename = ndb.StringProperty(default = "")
start_date = ndb.DateTimeProperty()
notes = ndb.StringProperty("")
moderator_approved = ndb.BooleanProperty(default = True)
added_by = ndb.KeyProperty()
created = ndb.DateTimeProperty(auto_now_add = True)
using this code
ins = somemodel()
ins.somename = "26-september-2016"
ins.somekey = the_key
ins.start_date = datetime.datetime.now()
ins.put()
and this exception gets thrown.
Traceback (most recent call last):
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/ext/admin/__init__.py", line 363, in post
exec(compiled_code, globals())
File "<string>", line 8, in <module>
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/ext/ndb/model.py", line 3451, in _put
return self._put_async(**ctx_options).get_result()
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/ext/ndb/tasklets.py", line 383, in get_result
self.check_success()
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/ext/ndb/tasklets.py", line 427, in _help_tasklet_along
value = gen.throw(exc.__class__, exc, tb)
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/ext/ndb/context.py", line 824, in put
key = yield self._put_batcher.add(entity, options)
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/ext/ndb/tasklets.py", line 427, in _help_tasklet_along
value = gen.throw(exc.__class__, exc, tb)
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/ext/ndb/context.py", line 358, in _put_tasklet
keys = yield self._conn.async_put(options, datastore_entities)
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/ext/ndb/tasklets.py", line 513, in _on_rpc_completion
result = rpc.get_result()
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/api/apiproxy_stub_map.py", line 613, in get_result
return self.__get_result_hook(self)
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/datastore/datastore_rpc.py", line 1881, in __put_hook
self.check_rpc_success(rpc)
File "/base/data/home/runtimes/python27/python27_lib/versions/1/google/appengine/datastore/datastore_rpc.py", line 1373, in check_rpc_success
raise _ToDatastoreError(err)
BadRequestError: The property.name is the empty string.
Any idea what this issue might be? It looks like a change in GAE ndb - as it worked as recently as 1 month ago...
Shouldn't notes = ndb.StringProperty("") be: notes = ndb.StringProperty(default = "") ?