Hey, any idea what the timeout error I am getting here is about?
Error trace:
File "/array/purato/python2.6/lib/python2.6/site-packages/cherrypy/_cprequest.py", line 606, in respond
cherrypy.response.body = self.handler()
File "/array/purato/python2.6/lib/python2.6/site-packages/cherrypy/_cpdispatch.py", line 25, in __call__
return self.callable(*self.args, **self.kwargs)
File "sync_server.py", line 853, in put_file
return RequestController_v1_0.put_file(self, *args, **kw)
File "sync_server.py", line 409, in put_file
saved_path, tgt_path, root_folder = self._save_file(client_id, theFile)
File "sync_server.py", line 404, in _save_file
saved_path, tgt_path, root_folder = get_posted_file(cherrypy.request, 'theFile', staging_path)
File "sync_server.py", line 1031, in get_posted_file
, keep_blank_values=True)
File "/array/purato/python2.6/lib/python2.6/cgi.py", line 496, in __init__
self.read_multi(environ, keep_blank_values, strict_parsing)
File "/array/purato/python2.6/lib/python2.6/cgi.py", line 620, in read_multi
environ, keep_blank_values, strict_parsing)
File "/array/purato/python2.6/lib/python2.6/cgi.py", line 498, in __init__
self.read_single()
File "/array/purato/python2.6/lib/python2.6/cgi.py", line 635, in read_single
self.read_lines()
File "/array/purato/python2.6/lib/python2.6/cgi.py", line 657, in read_lines
self.read_lines_to_outerboundary()
File "/array/purato/python2.6/lib/python2.6/cgi.py", line 685, in read_lines_to_outerboundary
line = self.fp.readline(1<<16)
File "/array/purato/python2.6/lib/python2.6/site-packages/cherrypy/wsgiserver/__init__.py", line 206, in readline
data = self.rfile.readline(size)
File "/array/purato/python2.6/lib/python2.6/site-packages/cherrypy/wsgiserver/__init__.py", line 868, in readline
data = self.recv(self._rbufsize)
File "/array/purato/python2.6/lib/python2.6/site-packages/cherrypy/wsgiserver/__init__.py", line 747, in recv
return self._sock.recv(size)
timeout: timed out
Here is the code which is getting called:
def get_posted_file(request, form_field_name, tgt_folder, tgt_fname=None):
    logger.debug('get_posted_file: %s' % request.headers['Last-Modified'])
    lowerHeaderMap = {}
    for key, value in request.headers.items():
        lowerHeaderMap[key.lower()] = value
---> dataDict = TmpFieldStorage(fp=request.rfile, headers=lowerHeaderMap, environ={'REQUEST_METHOD': 'POST'},
                                keep_blank_values=True)
and:
class TmpFieldStorage(cgi.FieldStorage):
    """
    Use a named temporary file to allow creation of hard link to final destination
    """
    def make_file(self, binary=None):
        tmp_folder = os.path.join(get_filer_root(cherrypy.request.login), 'sync_tmp')
        if not os.path.exists(tmp_folder):
            os.makedirs(tmp_folder)
        return tempfile.NamedTemporaryFile(dir=tmp_folder)
environ={'REQUEST_METHOD':'POST'}
That seems a rather deficient environ. The CGI spec requires many more environment variables to be in there, some of which the cgi module is going to need.
In particular, there is no CONTENT_LENGTH variable. Without it, cgi defaults to reading the entire contents of the stream up to EOF. But since it is (probably) a network stream rather than a file, there will be no EOF (or at least not one directly at the end of the submission), so the form reader will sit there waiting for more input that will never come. Timeout.
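A minimal sketch of that fix (untested, and assuming the lowercased header map from the question actually carries the content-length and content-type values) is to pass them through to the environ so the parser knows where the body ends:
environ = {
    'REQUEST_METHOD': 'POST',
    # tell cgi.FieldStorage how many bytes to read instead of waiting for EOF
    'CONTENT_LENGTH': lowerHeaderMap.get('content-length', '0'),
    'CONTENT_TYPE': lowerHeaderMap.get('content-type', ''),
}
dataDict = TmpFieldStorage(fp=request.rfile, headers=lowerHeaderMap,
                           environ=environ, keep_blank_values=True)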
I would like to send an image by email with FlaskMailMessage, but the image is base64 encoded.
Here is some example code:
def mail_answer():
msg = "iVBORw0KGgoAAAANSUhEUgAAACkAAAAyCAYAAADBcfKuAAABP2lDQ1BJQ0MgUHJvZmlsZQAAKJFjYGDiSSwoyGFhYGDIzSspCnJ3UoiIjFJgf8bAwcDJwMMgwSCfmFxc4BgQ4ANUwgCjUcG3awyMIPqyLsisOVoyrvvXfnVZU7hvNU+t0TNM9SiAKyW1OBlI/wHi+OSCohIGBsYYIFu5vKQAxG4AskWKgI4CsqeA2OkQ9goQOwnC3gNWExLkDGRfALIFkjMSU4DsB0C2ThKSeDoSG2ovCLD6GpkHEnAnyaAktaIERDvnF1QWZaZnlCg4AkMnVcEzL1lPR8HIwMiIgQEU1hDVnwPBYcgodgYhlr+IgcHiKwMD8wSEWNJMBobtrQwMErcQYioLGBj4WxgYtp0vSCxKhDuA8RtLcZqxEYTN4wT05r3//z+rMTCwT2Zg+Dvh///fi/7//7sYaP4dBoYDeQANBV6yWIqalwAAAFZlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA5KGAAcAAAASAAAARKACAAQAAAABAAAAKaADAAQAAAABAAAAMgAAAABBU0NJSQAAAFNjcmVlbnNob3QNr/WAAAAB1GlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyI+CiAgICAgICAgIDxleGlmOlBpeGVsWURpbWVuc2lvbj41MDwvZXhpZjpQaXhlbFlEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlBpeGVsWERpbWVuc2lvbj40MTwvZXhpZjpQaXhlbFhEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlVzZXJDb21tZW50PlNjcmVlbnNob3Q8L2V4aWY6VXNlckNvbW1lbnQ+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgp4Vtd+AAACRklEQVRoBe1XQasBURQ+8zxssFKUZGFjgZKSBUtlqWwoS1mIH2Bno/wBWdr5AVKslaUkCzvZ2FHspJg3V7neGOPeO+7TU/eW5tw53/3ON9+5M2MkWRnwz8fXP9d3kSdE8uqScFI4ycsBXjxiTwoneTnAi0fsSeEkLwd48XzEnvx+5WrP5zN0Oh0YjUYwm81gu91CMBiEcDgMpVIJ/H7/K/S3teifuZGxXC7lVCol2+32hz+XyyW3221ZuRAj9Ko1EprdJNNF6/Ua4vE47Pd74oJqtQqNRoOIewYwtCcrlYpGoMfjgVAoBBaLRVWv1WrBeDxWnWOeqHylmPR6PVV7FXFyv9/HKzebjZzP51WYWCyG80YCYF1Uq9VUArrdrobicDjISNjv/brb7TQ42hPM7Z7P57hbZrMZstksnl8Dq9UKmUzmOr0c0d1vdDCLXK1WuJbb7dbswWvS5/Ndw8sR3WxGB7NIpUW4liRJOL4PkskkoJsJDXRMJBL3EOr5Sw/zZ1W8Xi8sFotnEOocs5PUzByBHyGS6o0zGAyg2WzCdDp9yR+bzQa5XA7q9To4HA5qLqLI4/EIgUAAlIc0NSkJiC64XC6TYDhPbPdkMuEqEFUeDodYAE1AFHk6nWh4mDCsnESRTNX/CCxE8jJWOPk2J00mE69amIeVk9juaDQKTqcTF+ARpNNpJhqiSPTNgr5TIpEIE/EjMHotFotFKBQKj9K654ivRd2Vb0wQnXyjFt1SQqSuNYwJ4SSjYbpw4aSuNYyJj3DyB++qEnf7apQMAAAAAElFTkSuQmCC"
msg = base64.b64decode(msg)
attachments = [
FileStorage(
stream=io.BytesIO(msg),
filename="image01.png",
name="image01.png",
content_type="image/png",
)
]
headers = {"Reply-To": "hello#test.com"}
FlaskMailMessage(
"SO test",
sender="achichi#stackoverflow.com",
recipients=["test#†est.com"],
extra_headers=headers,
attachments=attachments,
)
When I try to execute it, I get the following error:
Traceback (most recent call last):
File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/threading.py", line 1009, in _bootstrap_inner
self.run()
File "/Users/antoine/Documents/Git/portail/portail/tools/threading.py", line 33, in run
super().run()
File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/threading.py", line 946, in run
self._target(*self._args, **self._kwargs)
File "/Users/antoine/Documents/Git/portail/portail/modules/notification/notifications.py", line 83, in _background_notification_handle
manager.mail_users(emailed_users, signal.name, obj)
File "/Users/antoine/Documents/Git/portail/portail/modules/helpdesk/notifications.py", line 360, in mail_users
self.mail_answer(users, ticket)
File "/Users/antoine/Documents/Git/portail/portail/modules/helpdesk/notifications.py", line 465, in mail_answer
mails.send_email(
File "/Users/antoine/Documents/Git/portail/portail/modules/mail/lib.py", line 297, in send_email
return mail.send(msg)
File "/Users/antoine/Documents/Git/portail/venv/lib/python3.10/site-packages/flask_mail.py", line 492, in send
message.send(connection)
File "/Users/antoine/Documents/Git/portail/venv/lib/python3.10/site-packages/flask_mail.py", line 427, in send
connection.send(self)
File "/Users/antoine/Documents/Git/portail/venv/lib/python3.10/site-packages/flask_mail.py", line 190, in send
message.as_bytes() if PY3 else message.as_string(),
File "/Users/antoine/Documents/Git/portail/venv/lib/python3.10/site-packages/flask_mail.py", line 385, in as_bytes
return self._message().as_bytes()
File "/Users/antoine/Documents/Git/portail/venv/lib/python3.10/site-packages/flask_mail.py", line 350, in _message
f.set_payload(attachment.data)
File "/Users/antoine/Documents/Git/portail/venv/lib/python3.10/site-packages/werkzeug/datastructures.py", line 3023, in __getattr__
return getattr(self.stream, name)
AttributeError: '_io.BytesIO' object has no attribute 'data'
Flask-Mail attachments are a list[FileStorage]; maybe I do not need io.BytesIO(msg)? But what should I use instead?
Judging from the documentation, it's not a FileStorage that's expected
as the attachments argument, but rather an Attachment object (as can be seen from the source code as well).
So the actual code to attach a file to the message would be:
def mail_answer():
msg = "iVBORw0KGgoAAAANSUhEUgAAACkAAAAyCAYAAADBcfKuAAABP2lDQ1BJQ0MgUHJvZmlsZQAAKJFjYGDiSSwoyGFhYGDIzSspCnJ3UoiIjFJgf8bAwcDJwMMgwSCfmFxc4BgQ4ANUwgCjUcG3awyMIPqyLsisOVoyrvvXfnVZU7hvNU+t0TNM9SiAKyW1OBlI/wHi+OSCohIGBsYYIFu5vKQAxG4AskWKgI4CsqeA2OkQ9goQOwnC3gNWExLkDGRfALIFkjMSU4DsB0C2ThKSeDoSG2ovCLD6GpkHEnAnyaAktaIERDvnF1QWZaZnlCg4AkMnVcEzL1lPR8HIwMiIgQEU1hDVnwPBYcgodgYhlr+IgcHiKwMD8wSEWNJMBobtrQwMErcQYioLGBj4WxgYtp0vSCxKhDuA8RtLcZqxEYTN4wT05r3//z+rMTCwT2Zg+Dvh///fi/7//7sYaP4dBoYDeQANBV6yWIqalwAAAFZlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA5KGAAcAAAASAAAARKACAAQAAAABAAAAKaADAAQAAAABAAAAMgAAAABBU0NJSQAAAFNjcmVlbnNob3QNr/WAAAAB1GlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyI+CiAgICAgICAgIDxleGlmOlBpeGVsWURpbWVuc2lvbj41MDwvZXhpZjpQaXhlbFlEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlBpeGVsWERpbWVuc2lvbj40MTwvZXhpZjpQaXhlbFhEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlVzZXJDb21tZW50PlNjcmVlbnNob3Q8L2V4aWY6VXNlckNvbW1lbnQ+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgp4Vtd+AAACRklEQVRoBe1XQasBURQ+8zxssFKUZGFjgZKSBUtlqWwoS1mIH2Bno/wBWdr5AVKslaUkCzvZ2FHspJg3V7neGOPeO+7TU/eW5tw53/3ON9+5M2MkWRnwz8fXP9d3kSdE8uqScFI4ycsBXjxiTwoneTnAi0fsSeEkLwd48XzEnvx+5WrP5zN0Oh0YjUYwm81gu91CMBiEcDgMpVIJ/H7/K/S3teifuZGxXC7lVCol2+32hz+XyyW3221ZuRAj9Ko1EprdJNNF6/Ua4vE47Pd74oJqtQqNRoOIewYwtCcrlYpGoMfjgVAoBBaLRVWv1WrBeDxWnWOeqHylmPR6PVV7FXFyv9/HKzebjZzP51WYWCyG80YCYF1Uq9VUArrdrobicDjISNjv/brb7TQ42hPM7Z7P57hbZrMZstksnl8Dq9UKmUzmOr0c0d1vdDCLXK1WuJbb7dbswWvS5/Ndw8sR3WxGB7NIpUW4liRJOL4PkskkoJsJDXRMJBL3EOr5Sw/zZ1W8Xi8sFotnEOocs5PUzByBHyGS6o0zGAyg2WzCdDp9yR+bzQa5XA7q9To4HA5qLqLI4/EIgUAAlIc0NSkJiC64XC6TYDhPbPdkMuEqEFUeDodYAE1AFHk6nWh4mDCsnESRTNX/CCxE8jJWOPk2J00mE69amIeVk9juaDQKTqcTF+ARpNNpJhqiSPTNgr5TIpEIE/EjMHotFotFKBQKj9K654ivRd2Vb0wQnXyjFt1SQqSuNYwJ4SSjYbpw4aSuNYyJj3DyB++qEnf7apQMAAAAAElFTkSuQmCC"
data_bytes = base64.b64decode(msg)
headers = {"Reply-To": "hello@test.com"}
msg = FlaskMailMessage(
"SO test",
sender="achichi@stackoverflow.com",
recipients=["test@test.com"],
extra_headers=headers
)
msg.attach("filename.png", "image/png", data_bytes)
# ...
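For completeness, sending it afterwards is the usual Flask-Mail call (a sketch that assumes a configured flask_mail.Mail instance named mail, as in the question's own traceback):
mail.send(msg)  # the file attached via msg.attach() goes out with the message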
I really don't know how to help myself here: I'm unfamiliar with this kind of error and haven't really found anything about it online. My last hope is one of you, since I don't know where else to go with this. I have already tried reinstalling all libraries and setting up a new venv; beyond that I don't trust myself enough with these kinds of things.
The code triggering the error:
from wetterdienst import DWDObservationData
observations_daily = DWDObservationData(
    station_ids=station_ids_d,
    parameter=params_daily,
    time_resolution=TimeResolution.DAILY,
    start_date="2015-01-01",
    end_date="2020-10-10",
    tidy_data=True,
    humanize_column_names=True,
)
for df in observations_hourly.collect_data():
    name = str(df.STATION_ID.iloc[0]).strip(".0")
    df.to_csv('./data/hourly/{}.csv'.format(name))
    print('{} done'.format(name))
API is found here: https://github.com/earthobservations/wetterdienst
Error:
Traceback (most recent call last):
File "/Users/sashakaun/PycharmProjects/wetter2.0/main.py", line 83, in <module>
for df in observations_hourly.collect_data():
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/wetterdienst/dwd/observations/api.py", line 178, in collect_data
df_parameter = self._collect_parameter_from_station(
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/wetterdienst/dwd/observations/api.py", line 243, in _collect_parameter_from_station
df_period = collect_climate_observations_data(
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/wetterdienst/dwd/observations/access.py", line 82, in collect_climate_observations_data
filenames_and_files = download_climate_observations_data_parallel(remote_files)
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/wetterdienst/dwd/observations/access.py", line 106, in download_climate_observations_data_parallel
return list(zip(remote_files, files_in_bytes))
File "/usr/local/Cellar/python#3.8/3.8.5/Frameworks/Python.framework/Versions/3.8/lib/python3.8/concurrent/futures/_base.py", line 611, in result_iterator
yield fs.pop().result()
File "/usr/local/Cellar/python#3.8/3.8.5/Frameworks/Python.framework/Versions/3.8/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/usr/local/Cellar/python#3.8/3.8.5/Frameworks/Python.framework/Versions/3.8/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/usr/local/Cellar/python#3.8/3.8.5/Frameworks/Python.framework/Versions/3.8/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/wetterdienst/dwd/observations/access.py", line 124, in _download_climate_observations_data
return BytesIO(__download_climate_observations_data(remote_file=remote_file))
File "<decorator-gen-2>", line 2, in __download_climate_observations_data
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/dogpile/cache/region.py", line 1356, in get_or_create_for_user_func
return self.get_or_create(
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/dogpile/cache/region.py", line 954, in get_or_create
with Lock(
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/dogpile/lock.py", line 185, in __enter__
return self._enter()
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/dogpile/lock.py", line 94, in _enter
generated = self._enter_create(value, createdtime)
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/dogpile/lock.py", line 178, in _enter_create
return self.creator()
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/dogpile/cache/region.py", line 920, in gen_value
self.backend.set(key, value)
File "/Users/sashakaun/PycharmProjects/wetter2.0/venv/lib/python3.8/site-packages/dogpile/cache/backends/file.py", line 239, in set
dbm[key] = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
_gdbm.error: Database needs recovery
Thanks a lot!!
A GDBM file has been corrupted. You need to use gdbmtool to recover the database. Install gdbmtool, then run
gdbmtool FILENAME
where FILENAME is the name of the GDBM database. A prompt will appear; you can then enter
gdbmtool> recover summary
If the database can be recovered, it will display a summary of the recovery results, e.g.:
Recovery succeeded.
Keys recovered: 6870650, failed: 5, duplicate: 0
Buckets recovered: 64830, failed: 2
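If you want to sanity-check the recovered file from Python afterwards, here is a minimal sketch (FILENAME is a placeholder for your dogpile cache file, the same one you passed to gdbmtool):
import dbm.gnu  # stdlib wrapper around gdbm

db = dbm.gnu.open("FILENAME", "r")  # raises _gdbm.error if the file is still corrupted
try:
    print(len(db.keys()), "keys readable")
finally:
    db.close()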
I'm trying to write a celery application that passes numpy arrays (or any arbitrary objects) to the workers. As far as I can tell, this requires serialization to occur via pickle (NB: I'm aware of the security implications but this isn't a concern in this case).
However, even after trying every possible way I could find to allow pickle as a serializer, I keep getting the following kombu exception:
kombu.exceptions.ContentDisallowed: Refusing to deserialize untrusted content of type pickle (application/x-python-serialize)
My files are currently:
# tasks.py
from celery import Celery

app = Celery(
    'tasks',
    broker='redis://localhost',
    accept_content=['pickle'],
    task_serializer='pickle'
)

@app.task
def adding(x, y):
    return x + y

if __name__ == '__main__':
    import numpy as np
    adding.apply_async((np.array([1]), np.array([1])), serializer='pickle')
In addition I have a config file:
# celeryconfig.py
print('configuring...')
accept_content = ['pickle', 'application/x-python-serialize']
task_serializer = 'pickle'
result_serializer = 'pickle'
from kombu import serialization
serialization.register_pickle()
serialization.enable_insecure_serializers()
However, if I run the worker (celery -A tasks worker --loglevel=info) and then execute the code that makes an async call (python tasks.py), I get the following traceback. Am I missing something?
[2018-06-16 11:46:23,617: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize untrusted content of type pickle (application/x-python-serialize)',)
Traceback (most recent call last):
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 322, in start
blueprint.start(self)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/worker/consufrom celery import Celery
mer/consumer.py", line 598, in start
c.loop(*c.loop_args())
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/transport/redis.py", line 724, in _brpop_read
self.connection._deliver(loads(bytes_to_str(item)), dest)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 572, in on_task_received
callbacks,
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/celery/worker/strategy.py", line 136, in task_message_handler
if body is None and 'args' not in message.payload:
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/opt/anaconda/envs/Python3/lib/python3.6/site-packages/kombu/serialization.py", line 253, in loads
raise self._for_untrusted_content(content_type, 'untrusted')
kombu.exceptions.ContentDisallowed: Refusing to deserialize untrusted content of type pickle (application/x-python-serialize)
For anyone coming to this question:
The answer was to use the app.config_from_object method:
import celeryconfig
app.config_from_object(celeryconfig)
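Put together with the tasks.py from the question, that looks roughly like this (a sketch; the broker URL and the task are taken from the question itself):
# tasks.py
from celery import Celery

import celeryconfig  # the module shown above, with pickle enabled

app = Celery('tasks', broker='redis://localhost')
app.config_from_object(celeryconfig)

@app.task
def adding(x, y):
    return x + y

if __name__ == '__main__':
    import numpy as np
    adding.apply_async((np.array([1]), np.array([1])), serializer='pickle')
Because the worker started with celery -A tasks worker loads the same app, it now also picks up accept_content = ['pickle'] and stops refusing the message.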
I have a simple xmlrpc server which is written along the lines of:
server = SimpleXMLRPCServer(('127.0.0.1', 8000), allow_none=True)
server.register_function(self.fetch_buyer_data, 'fetch_buyer_data')
...
...
server.serve_forever()
This is not the complete code, but you get the idea (hopefully)!
In the same server script, I have a function that reads the contents of an sqlite3 database and returns all the data. Something like this:
def fetch_buyer_data(self, projectname):
    conn = sqlite3.connect(...)
    # read data from sqlite3 database and save it into list
    conn.close()
    return datalist
And I use the following code from the client to access the above function:
proxy = xmlrpclib.ServerProxy("http://%s:%s/" %(hostip,hostport),allow_none=True)
data = proxy.fetch_buyer_data(SELECTED_PROJECT)
It's all good until the data in the sqlite3 database gets larger (not very large, just a few megabytes!); then I keep getting the following error message.
Traceback (most recent call last):
File "C:\Custom\src\Client\client.py", line 178, in show_user_page
userpage = UserPage()
File "C:\Custom\src\Client\client.py", line 2371, in __init__
self.update_buyer_table()
File "C:\Custom\src\Client\client.py", line 6106, in update_buyer_table
data = proxy.fetch_buyer_data(SELECTED_PROJECT)
File "C:\Python27\Lib\xmlrpclib.py", line 1224, in __call__
return self.__send(self.__name, args)
File "C:\Python27\Lib\xmlrpclib.py", line 1578, in __request
verbose=self.__verbose
File "C:\Python27\Lib\xmlrpclib.py", line 1264, in request
return self.single_request(host, handler, request_body, verbose)
File "C:\Python27\Lib\xmlrpclib.py", line 1297, in single_request
return self.parse_response(response)
File "C:\Python27\Lib\xmlrpclib.py", line 1453, in parse_response
stream = GzipDecodedResponse(response)
File "C:\Python27\Lib\xmlrpclib.py", line 1204, in __init__
self.stringio = StringIO.StringIO(response.read())
File "C:\Python27\Lib\httplib.py", line 548, in read
s = self._safe_read(self.length)
File "C:\Python27\Lib\httplib.py", line 649, in _safe_read
raise IncompleteRead(''.join(s), amt)
httplib.IncompleteRead: IncompleteRead(8031 bytes read, 1732 more expected)
NOTE: I have checked that the rest of the registered functions on the server are working, so I can rule out connection problems (IP, port, etc.).
What is causing this error message? How do I overcome the problem?
I am using Python 2.7 on Windows XP SP3.
UPDATE 1:
I found out that this doesn't entirely depend on the size of the database. Sometimes it gives me the error message, sometimes it doesn't. Could anyone tell me what is causing this IncompleteRead problem?
Can anyone help me with this strange error I'm getting when adding a new document to a Whoosh index?
Here's the code:
def add_to_index(self, doc):
    ix = index.open_dir(self.index_dir)
    writer = AsyncWriter(ix)  # use async writer to prevent write lock errors
    writer.add_document(**self.get_doc_args(doc))
    writer.commit()

def get_doc_args(self, doc):
    return {
        'id': u"" + str(doc['id']),
        'org': doc['org__id'],
        'created': doc['created_date'],
        'date': doc['received_date'],
        'from_addr': doc['from_addr'],
        'subject': doc['subject'],
        'body': doc['messagebody__cleaned_message']
    }
I get the following error:
TypeError('ord() expected a character, but string of length 0 found',)
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/celery/execute/trace.py", line 36, in trace
return cls(states.SUCCESS, retval=fun(*args, **kwargs))
File "/usr/local/lib/python2.6/dist-packages/celery/app/task/__init__.py", line 232, in __call__
return self.run(*args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/celery/app/__init__.py", line 172, in run
return fun(*args, **kwargs)
File "/mnt/deploy/prod/chorus/src/chorus/../chorus/search/__init__.py", line 131, in index_message
MessageSearcher().add_to_index(message)
File "/mnt/deploy/prod/chorus/src/chorus/../chorus/search/__init__.py", line 29, in add_to_index
writer.commit()
File "/usr/local/lib/python2.6/dist-packages/whoosh/writing.py", line 423, in commit
self.writer.commit(*args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/whoosh/filedb/filewriting.py", line 501, in commit
new_segments = mergetype(self, self.segments)
File "/usr/local/lib/python2.6/dist-packages/whoosh/filedb/filewriting.py", line 78, in MERGE_SMALL
reader = SegmentReader(writer.storage, writer.schema, seg)
File "/usr/local/lib/python2.6/dist-packages/whoosh/filedb/filereading.py", line 63, in __init__
self.termsindex = TermIndexReader(tf)
File "/usr/local/lib/python2.6/dist-packages/whoosh/filedb/filetables.py", line 590, in __init__
super(TermIndexReader, self).__init__(dbfile)
File "/usr/local/lib/python2.6/dist-packages/whoosh/filedb/filetables.py", line 502, in __init__
OrderedHashReader.__init__(self, dbfile)
File "/usr/local/lib/python2.6/dist-packages/whoosh/filedb/filetables.py", line 379, in __init__
HashReader.__init__(self, dbfile)
File "/usr/local/lib/python2.6/dist-packages/whoosh/filedb/filetables.py", line 187, in __init__
self.hashtype = dbfile.read_byte()
File "/usr/local/lib/python2.6/dist-packages/whoosh/filedb/structfile.py", line 219, in read_byte
return ord(self.file.read(1))
Strangely, the exact same code using a standard writer (i.e. not AsyncWriter) works just fine. What am I missing here? Note that in production I have to use AsyncWriter in order to avoid LockErrors.
This error is caused by some kind of index corruption. In my case the machine crashed for an unrelated reason while the index was being rebuilt.
You can easily solve it by deleting the contents of the whoosh_index folder completely and rebuilding the index.
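A minimal sketch of that cleanup (untested; index_dir and schema stand in for the index directory and Whoosh schema your project already uses):
import os
import shutil
from whoosh import index

index_dir = "whoosh_index"                    # placeholder for self.index_dir
shutil.rmtree(index_dir, ignore_errors=True)  # throw away the corrupted segment files
os.makedirs(index_dir)
ix = index.create_in(index_dir, schema)       # recreate an empty index with your schema
# ...then re-add every document, e.g. by calling add_to_index(doc) in a loop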
Ended up finding a solution; it's called Solr :-)