When installing Indico, I get an error on index.py (Python)

I'm trying to install Indico following the installation guide, but there is a problem I can't fix.
index.py returns this message in the browser:
MOD_PYTHON ERROR
ProcessId: 18206
Interpreter: '127.0.1.1'
ServerName: '127.0.1.1'
DocumentRoot: '/var/www'
URI: '/indico/index.py'
Location: None
Directory: '/opt/indico/htdocs/'
Filename: '/opt/indico/htdocs/index.py'
PathInfo: None
Phase: 'PythonHandler'
Handler: 'mod_python.publisher'
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mod_python/importer.py", line 1537, in HandlerDispatch
default=default_handler, arg=req, silent=hlist.silent)
File "/usr/lib/python2.7/dist-packages/mod_python/importer.py", line 1229, in _process_target
result = _execute_target(config, req, object, arg)
File "/usr/lib/python2.7/dist-packages/mod_python/importer.py", line 1128, in _execute_target
result = object(arg)
File "/usr/lib/python2.7/dist-packages/mod_python/publisher.py", line 213, in handler
published = publish_object(req, object)
File "/usr/lib/python2.7/dist-packages/mod_python/publisher.py", line 425, in publish_object
return publish_object(req,util.apply_fs_data(object, req.form, req=req))
File "/usr/lib/python2.7/dist-packages/mod_python/util.py", line 554, in apply_fs_data
return object(**args)
File "/opt/indico/htdocs/index.py", line 23, in index
return welcome.RHWelcome( req ).process( params )
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/rh/base.py", line 707, in process
res = self._processUnexpectedError( e )
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/rh/base.py", line 382, in _processUnexpectedError
return p.display()
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/pages/base.py", line 189, in display
return "%s%s%s"%( self._getHTMLHeader(), \
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/pages/base.py", line 169, in _getHTMLHeader
"assets": self._asset_env
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/wcomponents.py", line 195, in getHTML
tempHTML = templateEngine.render(self.tplFile, vars, self)
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/common/TemplateExec.py", line 128, in render
return template.render(**params)
File "/usr/local/lib/python2.7/dist-packages/Mako-1.0.0-py2.7.egg/mako/template.py", line 443, in render
return runtime._render(self, self.callable_, args, data)
File "/usr/local/lib/python2.7/dist-packages/Mako-1.0.0-py2.7.egg/mako/runtime.py", line 803, in _render
**_kwargs_for_callable(callable_, data))
File "/usr/local/lib/python2.7/dist-packages/Mako-1.0.0-py2.7.egg/mako/runtime.py", line 835, in _render_context
_exec_template(inherit, lclcontext, args=args, kwargs=kwargs)
File "/usr/local/lib/python2.7/dist-packages/Mako-1.0.0-py2.7.egg/mako/runtime.py", line 860, in _exec_template
callable_(context, *args, **kwargs)
File "/opt/indico/tmp/mako_modules/HTMLHeader.tpl.py", line 44, in render_body
__M_writer(str(self_._rh.csrf_token))
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/rh/base.py", line 278, in csrf_token
return self._getSession().csrf_token
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/rh/base.py", line 268, in _getSession
self._setSession()
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/rh/base.py", line 261, in _setSession
self._websession = session.getSessionForReq(self._req)
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/session/__init__.py", line 28, in getSessionForReq
return sm.get_session(req)
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/session/base.py", line 637, in get_session
rw = RequestWrapper.getWrapper( request )
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/session/base.py", line 730, in getWrapper
w = RequestWrapper( req )
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/MaKaC/webinterface/session/base.py", line 709, in __init__
self.environ = {"REMOTE_ADDR": _get_remote_ip(request)}
File "/usr/local/lib/python2.7/dist-packages/indico-1.1.2-py2.7.egg/indico/util/network.py", line 31, in _get_remote_ip
hostIP = str(req.get_remote_ip())
AttributeError: 'mp_request' object has no attribute 'get_remote_ip'
MODULE CACHE DETAILS
Accessed: Fri Jun 20 11:54:56 2014
Generation: 1
_mp_f82f1cbab6d44e96c95ffaf5362fd5ea {
FileName: '/opt/indico/htdocs/index.py'
Instance: 1
Generation: 1
Modified: Fri Jun 20 10:38:13 2014
Imported: Fri Jun 20 10:57:15 2014
}
I don't understand what the problem is.
python -V > Python 2.7.3
apache2 -v > Server version: Apache/2.2.22 (Debian)
libapache2-mod-python > 3.3-4+deb7u1
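The traceback bottoms out in Indico's own indico/util/network.py calling req.get_remote_ip(), a method that mod_python's mp_request object does not have, so this looks like a mismatch between the Indico code and the mod_python stack serving it; this generation of Indico is normally deployed behind mod_wsgi rather than mod_python, so re-checking that part of the installation guide is probably the real fix. As a stopgap, here is a minimal compatibility sketch, assuming you want to keep mod_python while you investigate. It is not the upstream code (the real helper also deals with things like proxy headers); it only relies on mod_python exposing the client address on req.connection.remote_ip:

def _get_remote_ip(req):
    # Hypothetical stopgap for indico/util/network.py, not the upstream code.
    if hasattr(req, 'get_remote_ip'):
        return str(req.get_remote_ip())
    # mod_python's mp_request exposes the peer address on its connection
    return str(req.connection.remote_ip)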

Related

spacy.load error: RuntimeError: dictionary changed size during iteration

I am loading a spaCy model as part of a step in my Dataflow streaming pipeline. To load the pre-downloaded spaCy model for a specific language I am using
nlp_model = spacy.load(SPACY_KEYS[lang])
where SPACY_KEYS is a dictionary containing the names of the models for each language (e.g. 'en': 'en_core_web_sm').
This works without any issues for the majority of the jobs run by the pipeline, but for a few iterations I am getting the following error:
Error message from worker: generic::unknown: Traceback (most recent call last):
File "apache_beam/runners/common.py", line 1232, in apache_beam.runners.common.DoFnRunner.process
File "apache_beam/runners/common.py", line 752, in apache_beam.runners.common.PerWindowInvoker.invoke_process
File "apache_beam/runners/common.py", line 870, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
File "apache_beam/runners/common.py", line 1368, in apache_beam.runners.common._OutputProcessor.process_outputs
File "/usr/local/lib/python3.7/site-packages/submodules/entities_and_pii_removal.py", line 259, in entities_and_PII
nlp_model = spacy.load(SPACY_KEYS[lang]) # load spacy model
File "/usr/local/lib/python3.7/site-packages/spacy/__init__.py", line 52, in load
name, vocab=vocab, disable=disable, exclude=exclude, config=config
File "/usr/local/lib/python3.7/site-packages/spacy/util.py", line 420, in load_model
return load_model_from_package(name, **kwargs) # type: ignore[arg-type]
File "/usr/local/lib/python3.7/site-packages/spacy/util.py", line 453, in load_model_from_package
return cls.load(vocab=vocab, disable=disable, exclude=exclude, config=config) # type: ignore[attr-defined]
File "/usr/local/lib/python3.7/site-packages/de_core_news_sm/__init__.py", line 10, in load
return load_model_from_init_py(__file__, **overrides)
File "/usr/local/lib/python3.7/site-packages/spacy/util.py", line 621, in load_model_from_init_py
config=config,
File "/usr/local/lib/python3.7/site-packages/spacy/util.py", line 489, in load_model_from_path
return nlp.from_disk(model_path, exclude=exclude, overrides=overrides)
File "/usr/local/lib/python3.7/site-packages/spacy/language.py", line 2042, in from_disk
util.from_disk(path, deserializers, exclude) # type: ignore[arg-type]
File "/usr/local/lib/python3.7/site-packages/spacy/util.py", line 1299, in from_disk
reader(path / key)
File "/usr/local/lib/python3.7/site-packages/spacy/language.py", line 2037, in <lambda>
p, exclude=["vocab"]
File "spacy/pipeline/trainable_pipe.pyx", line 343, in spacy.pipeline.trainable_pipe.TrainablePipe.from_disk
File "/usr/local/lib/python3.7/site-packages/spacy/util.py", line 1299, in from_disk
reader(path / key)
File "spacy/pipeline/trainable_pipe.pyx", line 333, in spacy.pipeline.trainable_pipe.TrainablePipe.from_disk.load_model
File "spacy/pipeline/trainable_pipe.pyx", line 334, in spacy.pipeline.trainable_pipe.TrainablePipe.from_disk.load_model
File "/usr/local/lib/python3.7/site-packages/thinc/model.py", line 593, in from_bytes
return self.from_dict(msg)
File "/usr/local/lib/python3.7/site-packages/thinc/model.py", line 624, in from_dict
loaded_value = deserialize_attr(default_value, value, attr, node)
File "/usr/local/lib/python3.7/functools.py", line 840, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/local/lib/python3.7/site-packages/thinc/model.py", line 804, in deserialize_attr
return srsly.msgpack_loads(value)
File "/usr/local/lib/python3.7/site-packages/srsly/_msgpack_api.py", line 27, in msgpack_loads
msg = msgpack.loads(data, raw=False, use_list=use_list)
File "/usr/local/lib/python3.7/site-packages/srsly/msgpack/__init__.py", line 76, in unpackb
for decoder in msgpack_decoders.get_all().values():
File "/usr/local/lib/python3.7/site-packages/catalogue/__init__.py", line 110, in get_all
for keys, value in REGISTRY.items():
RuntimeError: dictionary changed size during iteration
I have not been able to identify the cause of this problem. Is there a way of getting around it?
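A hedged workaround sketch, assuming the error comes from several worker threads calling spacy.load() at the same time: the failing frame is catalogue iterating its global REGISTRY while, presumably, another thread is registering entries into it during a concurrent load. Loading each model at most once per process, behind a lock, keeps two threads from running spacy.load() simultaneously:

import threading

import spacy

_LOAD_LOCK = threading.Lock()
_MODELS = {}  # cache of loaded pipelines, keyed by package name

def get_nlp(name):
    # Serialize loads and reuse pipelines instead of reloading per element.
    with _LOAD_LOCK:
        if name not in _MODELS:
            _MODELS[name] = spacy.load(name)
        return _MODELS[name]

The pipeline step then becomes nlp_model = get_nlp(SPACY_KEYS[lang]); in a Beam DoFn, the natural place for the first load is setup(), which runs once per DoFn instance rather than once per element.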

Scrapy hanging with any more than five spiders running

I am trying to run multiple Scrapy spiders at once, each started from the command line, for example:
>> python run_scrapy_spider_1.py
>> python run_scrapy_spider_2.py
However, once I already have 5 spiders running (all on different domains), the next one I try to run hangs with the message:
INFO: Telnet console listening on 127.0.0.1:6028
I don't think it's an issue with the 6th site I am trying to crawl (it works fine when fewer than 5 spiders are running); the problem seems to be that 5 spiders are already running.
Here is my very long log output, in case it helps. Any ideas how I can get more than 5 spiders running at once?
--- Logging error ---
Traceback (most recent call last):
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 33, in emit
payload = self.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 833, in format
return fmt.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 573, in format
s = self.formatMessage(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 542, in formatMessage
return self._style.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 386, in format
return self._fmt % record.__dict__
KeyError: 'request_id'
Call stack:
File "run_spider_alexa_id.py", line 16, in <module>
SpiderTasks.run_spider_for_alexa_site_id(alexa_site_id)
File "/home/ec2-user/code/green_brick_two/pricecomparison_project/pricecomparison/scripts/spider_scripts/spider_tasks.py", line 18, in run_spider_for_alexa_site_id
process.crawl(MySpider, alexa_site_id=alexa_site_id)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 172, in crawl
return self._crawl(crawler, *args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 176, in _crawl
d = crawler.crawl(*args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 80, in crawl
self.engine = self._create_engine()
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 105, in _create_engine
return ExecutionEngine(self, lambda _: self.stop())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/core/engine.py", line 70, in __init__
self.scraper = Scraper(crawler)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/core/scraper.py", line 69, in __init__
self.spidermw = SpiderMiddlewareManager.from_crawler(crawler)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/middleware.py", line 53, in from_crawler
return cls.from_settings(crawler.settings, crawler)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/middleware.py", line 48, in from_settings
extra={'crawler': crawler})
File "/usr/local/lib/python3.6/logging/__init__.py", line 1301, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1437, in _log
self.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1447, in handle
self.callHandlers(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/sentry_sdk/integrations/logging.py", line 47, in sentry_patched_callhandlers
return old_callhandlers(self, record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1509, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 858, in handle
self.emit(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 38, in emit
self.handleError(record)
Message: 'Enabled %(componentname)ss:\n%(enabledlist)s'
Arguments: {'componentname': 'spider middleware', 'enabledlist': "['scrapy.spidermiddlewares.httperror.HttpErrorMiddleware',\n 'scrapy.spidermiddlewares.offsite.OffsiteMiddleware',\n 'scrapy.spidermiddlewares.referer.RefererMiddleware',\n 'scrapy.spidermiddlewares.urllength.UrlLengthMiddleware',\n 'scrapy.spidermiddlewares.depth.DepthMiddleware']"}
2019-03-03 22:34:30 [scrapy.middleware] INFO: Enabled spider middlewares:
['scrapy.spidermiddlewares.httperror.HttpErrorMiddleware',
'scrapy.spidermiddlewares.offsite.OffsiteMiddleware',
'scrapy.spidermiddlewares.referer.RefererMiddleware',
'scrapy.spidermiddlewares.urllength.UrlLengthMiddleware',
'scrapy.spidermiddlewares.depth.DepthMiddleware']
--- Logging error ---
Traceback (most recent call last):
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 33, in emit
payload = self.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 833, in format
return fmt.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 573, in format
s = self.formatMessage(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 542, in formatMessage
return self._style.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 386, in format
return self._fmt % record.__dict__
KeyError: 'request_id'
Call stack:
File "run_spider_alexa_id.py", line 16, in <module>
SpiderTasks.run_spider_for_alexa_site_id(alexa_site_id)
File "/home/ec2-user/code/green_brick_two/pricecomparison_project/pricecomparison/scripts/spider_scripts/spider_tasks.py", line 18, in run_spider_for_alexa_site_id
process.crawl(MySpider, alexa_site_id=alexa_site_id)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 172, in crawl
return self._crawl(crawler, *args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 176, in _crawl
d = crawler.crawl(*args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 80, in crawl
self.engine = self._create_engine()
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 105, in _create_engine
return ExecutionEngine(self, lambda _: self.stop())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/core/engine.py", line 70, in __init__
self.scraper = Scraper(crawler)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/core/scraper.py", line 71, in __init__
self.itemproc = itemproc_cls.from_crawler(crawler)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/middleware.py", line 53, in from_crawler
return cls.from_settings(crawler.settings, crawler)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/middleware.py", line 48, in from_settings
extra={'crawler': crawler})
File "/usr/local/lib/python3.6/logging/__init__.py", line 1301, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1437, in _log
self.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1447, in handle
self.callHandlers(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/sentry_sdk/integrations/logging.py", line 47, in sentry_patched_callhandlers
return old_callhandlers(self, record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1509, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 858, in handle
self.emit(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 38, in emit
self.handleError(record)
Message: 'Enabled %(componentname)ss:\n%(enabledlist)s'
Arguments: {'componentname': 'item pipeline', 'enabledlist': '[]'}
2019-03-03 22:34:30 [scrapy.middleware] INFO: Enabled item pipelines:
[]
--- Logging error ---
Traceback (most recent call last):
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 33, in emit
payload = self.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 833, in format
return fmt.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 573, in format
s = self.formatMessage(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 542, in formatMessage
return self._style.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 386, in format
return self._fmt % record.__dict__
KeyError: 'request_id'
Call stack:
File "run_spider_alexa_id.py", line 16, in <module>
SpiderTasks.run_spider_for_alexa_site_id(alexa_site_id)
File "/home/ec2-user/code/green_brick_two/pricecomparison_project/pricecomparison/scripts/spider_scripts/spider_tasks.py", line 18, in run_spider_for_alexa_site_id
process.crawl(MySpider, alexa_site_id=alexa_site_id)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 172, in crawl
return self._crawl(crawler, *args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 176, in _crawl
d = crawler.crawl(*args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 82, in crawl
yield self.engine.open_spider(self.spider, start_requests)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/core/engine.py", line 256, in open_spider
logger.info("Spider opened", extra={'spider': spider})
File "/usr/local/lib/python3.6/logging/__init__.py", line 1301, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1437, in _log
self.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1447, in handle
self.callHandlers(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/sentry_sdk/integrations/logging.py", line 47, in sentry_patched_callhandlers
return old_callhandlers(self, record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1509, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 858, in handle
self.emit(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 38, in emit
self.handleError(record)
Message: 'Spider opened'
Arguments: ()
2019-03-03 22:34:30 [scrapy.core.engine] INFO: Spider opened
--- Logging error ---
Traceback (most recent call last):
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 33, in emit
payload = self.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 833, in format
return fmt.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 573, in format
s = self.formatMessage(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 542, in formatMessage
return self._style.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 386, in format
return self._fmt % record.__dict__
KeyError: 'request_id'
Call stack:
File "run_spider_alexa_id.py", line 16, in <module>
SpiderTasks.run_spider_for_alexa_site_id(alexa_site_id)
File "/home/ec2-user/code/green_brick_two/pricecomparison_project/pricecomparison/scripts/spider_scripts/spider_tasks.py", line 18, in run_spider_for_alexa_site_id
process.crawl(MySpider, alexa_site_id=alexa_site_id)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 172, in crawl
return self._crawl(crawler, *args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 176, in _crawl
d = crawler.crawl(*args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 82, in crawl
yield self.engine.open_spider(self.spider, start_requests)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/core/engine.py", line 266, in open_spider
yield self.signals.send_catch_log_deferred(signals.spider_opened, spider=spider)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/signalmanager.py", line 61, in send_catch_log_deferred
return _signal.send_catch_log_deferred(signal, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/utils/signal.py", line 65, in send_catch_log_deferred
*arguments, **named)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 150, in maybeDeferred
result = f(*args, **kw)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/pydispatch/robustapply.py", line 55, in robustApply
return receiver(*arguments, **named)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/extensions/logstats.py", line 35, in spider_opened
self.task.start(self.interval)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/task.py", line 194, in start
self()
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/task.py", line 239, in __call__
d = defer.maybeDeferred(self.f, *self.a, **self.kw)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 150, in maybeDeferred
result = f(*args, **kw)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/extensions/logstats.py", line 48, in log
logger.info(msg, log_args, extra={'spider': spider})
File "/usr/local/lib/python3.6/logging/__init__.py", line 1301, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1437, in _log
self.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1447, in handle
self.callHandlers(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/sentry_sdk/integrations/logging.py", line 47, in sentry_patched_callhandlers
return old_callhandlers(self, record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1509, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 858, in handle
self.emit(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 38, in emit
self.handleError(record)
Message: 'Crawled %(pages)d pages (at %(pagerate)d pages/min), scraped %(items)d items (at %(itemrate)d items/min)'
Arguments: {'pages': 0, 'pagerate': 0.0, 'items': 0, 'itemrate': 0.0}
2019-03-03 22:34:30 [scrapy.extensions.logstats] INFO: Crawled 0 pages (at 0 pages/min), scraped 0 items (at 0 items/min)
--- Logging error ---
Traceback (most recent call last):
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 33, in emit
payload = self.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 833, in format
return fmt.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 573, in format
s = self.formatMessage(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 542, in formatMessage
return self._style.format(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 386, in format
return self._fmt % record.__dict__
KeyError: 'request_id'
Call stack:
File "run_spider_alexa_id.py", line 16, in <module>
SpiderTasks.run_spider_for_alexa_site_id(alexa_site_id)
File "/home/ec2-user/code/green_brick_two/pricecomparison_project/pricecomparison/scripts/spider_scripts/spider_tasks.py", line 18, in run_spider_for_alexa_site_id
process.crawl(MySpider, alexa_site_id=alexa_site_id)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 172, in crawl
return self._crawl(crawler, *args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 176, in _crawl
d = crawler.crawl(*args, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/crawler.py", line 83, in crawl
yield defer.maybeDeferred(self.engine.start)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 150, in maybeDeferred
result = f(*args, **kw)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/core/engine.py", line 78, in start
yield self.signals.send_catch_log_deferred(signal=signals.engine_started)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/signalmanager.py", line 61, in send_catch_log_deferred
return _signal.send_catch_log_deferred(signal, **kwargs)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/utils/signal.py", line 65, in send_catch_log_deferred
*arguments, **named)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/Twisted-18.4.0-py3.6-linux-x86_64.egg/twisted/internet/defer.py", line 150, in maybeDeferred
result = f(*args, **kw)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/pydispatch/robustapply.py", line 55, in robustApply
return receiver(*arguments, **named)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/scrapy/extensions/telnet.py", line 74, in start_listening
extra={'crawler': self.crawler})
File "/usr/local/lib/python3.6/logging/__init__.py", line 1301, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1437, in _log
self.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1447, in handle
self.callHandlers(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/sentry_sdk/integrations/logging.py", line 47, in sentry_patched_callhandlers
return old_callhandlers(self, record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 1509, in callHandlers
hdlr.handle(record)
File "/usr/local/lib/python3.6/logging/__init__.py", line 858, in handle
self.emit(record)
File "/home/ec2-user/MYVENV/lib/python3.6/site-packages/loggly/handlers.py", line 38, in emit
self.handleError(record)
Message: 'Telnet console listening on %(host)s:%(port)d'
Arguments: {'host': '127.0.0.1', 'port': 6028}
2019-03-03 22:34:30 [scrapy.extensions.telnet] INFO: Telnet console listening on 127.0.0.1:6028
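Two hedged observations on this log. First, the repeated "--- Logging error ---" blocks are formatter noise rather than the hang itself: the loggly handler's format string expects a request_id field that Scrapy's log records do not carry, so every Scrapy log call raises KeyError: 'request_id' inside the handler. Second, if the goal is simply to run many spiders at once, Scrapy's documented CrawlerProcess can schedule several crawls on one Twisted reactor instead of one OS process per spider, which sidesteps whatever per-process limit is being hit here. A minimal sketch (ExampleSpider is a placeholder, not your spider):

from scrapy import Spider
from scrapy.crawler import CrawlerProcess

class ExampleSpider(Spider):
    name = 'example'
    start_urls = ['https://example.com']

    def parse(self, response):
        yield {'title': response.css('title::text').extract_first()}

process = CrawlerProcess()
for _ in range(6):
    # each call schedules an independent crawler on the shared reactor
    process.crawl(ExampleSpider)
process.start()  # blocks until every scheduled crawl finishes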

Python client for AWS Redis Cluster

Can anyone suggest a Python client for AWS ElastiCache Redis with cluster mode enabled?
I'm using redis-py-cluster, but it fails:
Sample code:
from rediscluster import StrictRedisCluster
startup_nodes = [{"host": "xxxx.clustercfg.apn2.cache.amazonaws.com", "port": "6379"}]
r = StrictRedisCluster(startup_nodes=startup_nodes, decode_responses=True, skip_full_coverage_check=False)
r.set('foo', 'bar')
value = r.get('foo')
======
Exception:
Traceback (most recent call last):
File "testRedisCluster.py", line 11, in
r = StrictRedisCluster(startup_nodes=startup_nodes, decode_responses=True, skip_full_coverage_check=False)
File "/Library/Python/2.7/site-packages/rediscluster/client.py", line 181, in init
**kwargs
File "/Library/Python/2.7/site-packages/rediscluster/connection.py", line 141, in init
self.nodes.initialize()
File "/Library/Python/2.7/site-packages/rediscluster/nodemanager.py", line 228, in initialize
need_full_slots_coverage = self.cluster_require_full_coverage(nodes_cache)
File "/Library/Python/2.7/site-packages/rediscluster/nodemanager.py", line 270, in cluster_require_full_coverage
return any(node_require_full_coverage(node) for node in nodes.values())
File "/Library/Python/2.7/site-packages/rediscluster/nodemanager.py", line 270, in
return any(node_require_full_coverage(node) for node in nodes.values())
File "/Library/Python/2.7/site-packages/rediscluster/nodemanager.py", line 267, in node_require_full_coverage
return "yes" in r_node.config_get("cluster-require-full-coverage").values()
File "/Library/Python/2.7/site-packages/redis/client.py", line 715, in config_get
return self.execute_command('CONFIG GET', pattern)
File "/Library/Python/2.7/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/Library/Python/2.7/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/Library/Python/2.7/site-packages/redis/connection.py", line 629, in read_response
raise response
redis.exceptions.ResponseError: unknown command 'CONFIG'
I'm using redis-py-cluster 1.3.4.
Any idea?
Change the parameter skip_full_coverage_check=False to skip_full_coverage_check=True
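For reference, the corrected version of the snippet from the question. ElastiCache blocks admin commands such as CONFIG, which is exactly what the full-coverage check issues (hence redis.exceptions.ResponseError: unknown command 'CONFIG'), so the check has to be skipped:

from rediscluster import StrictRedisCluster

startup_nodes = [{"host": "xxxx.clustercfg.apn2.cache.amazonaws.com", "port": "6379"}]
r = StrictRedisCluster(startup_nodes=startup_nodes,
                       decode_responses=True,
                       skip_full_coverage_check=True)  # was False
r.set('foo', 'bar')
value = r.get('foo')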

google-api-python-client broken by OAuth2?

I am trying to check whether a certain dataset exists in BigQuery, and I get this very strange error that I had never seen until yesterday:
ERROR:dsUtils.bq_utils:Could not check if dataset tmp exists.
Traceback (most recent call last):
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\dsUtils\bq_utils.py", line 113, in _get
resp = bq_service.datasets().get(projectId=self.project_id, datasetId=self.id).execute(num_retries=2)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\oauth2client\util.py", line 137, in positional_wrapper
return wrapped(*args, **kwargs)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\googleapiclient\http.py", line 755, in execute
method=str(self.method), body=self.body, headers=self.headers)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\googleapiclient\http.py", line 93, in _retry_request
resp, content = http.request(uri, method, *args, **kwargs)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\oauth2client\client.py", line 598, in new_request
self._refresh(request_orig)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\oauth2client\client.py", line 864, in _refresh
self._do_refresh_request(http_request)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\oauth2client\client.py", line 891, in _do_refresh_request
body = self._generate_refresh_request_body()
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\oauth2client\client.py", line 1597, in _generate_refresh_request_body
assertion = self._generate_assertion()
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\oauth2client\service_account.py", line 318, in _generate_assertion
key_id=self._private_key_id)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\oauth2client\crypt.py", line 97, in make_signed_jwt
signature = signer.sign(signing_input)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\oauth2client\_pycrypto_crypt.py", line 101, in sign
return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\Crypto\Signature\PKCS1_v1_5.py", line 112, in sign
m = self._key.decrypt(em)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\Crypto\PublicKey\RSA.py", line 174, in decrypt
return pubkey.pubkey.decrypt(self, ciphertext)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\Crypto\PublicKey\pubkey.py", line 93, in decrypt
plaintext=self._decrypt(ciphertext)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\Crypto\PublicKey\RSA.py", line 235, in _decrypt
r = getRandomRange(1, self.key.n-1, randfunc=self._randfunc)
File "C:\Users\paco\Anaconda3\envs\visitForecastEnv\lib\site-packages\Crypto\PublicKey\RSA.py", line 126, in __getattr__
raise AttributeError("%s object has no %r attribute" % (self.__class__.__name__, attrname,))
AttributeError: _RSAobj object has no '_randfunc' attribute
Does anyone have an idea why I am suddenly getting these errors?
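A hedged way to narrow this down: the failing frame is oauth2client's PyCrypto signer, so try reproducing the signing step with PyCrypto directly, outside the BigQuery call. If the snippet below works against the raw private key from your service-account file, the key itself is fine, and the problem is more likely in how the credentials object is built, copied, or shared between processes (PyCrypto key objects can lose internal state such as _randfunc when they are reconstructed that way). The key path is a placeholder:

from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5

# placeholder path: the PEM private key from the service-account JSON
with open('private_key.pem', 'rb') as f:
    key = RSA.importKey(f.read())

signer = PKCS1_v1_5.new(key)
signature = signer.sign(SHA256.new(b'test payload'))
print(len(signature))  # succeeding here means the key object is usable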

Jasper Report Module on OpenERP 7

I was trying to install the Jasper Report module for OpenERP 7.
I got the Syleam module from here:
https://github.com/syleam/openerp-jasperserver
and downloaded OpenERP 7 from here:
http://nightly.openerp.com/7.0/nightly/src/
I already installed httplib2, pyPdf and python-dime, which are required by this module. But when I try to install the module, I get this error:
OpenERP Server Error
Client Traceback (most recent call last):
File "/opt/openerp-7/openerp/addons/web/http.py", line 204, in dispatch
response["result"] = method(self, **self.params)
File "/opt/openerp-7/openerp/addons/web/controllers/main.py", line 1132, in call_button
action = self._call_kw(req, model, method, args, {})
File "/opt/openerp-7/openerp/addons/web/controllers/main.py", line 1120, in _call_kw
return getattr(req.session.model(model), method)(*args, **kwargs)
File "/opt/openerp-7/openerp/addons/web/session.py", line 42, in proxy
result = self.proxy.execute_kw(self.session._db, self.session._uid, self.session._password, self.model, method, args, kw)
File "/opt/openerp-7/openerp/addons/web/session.py", line 30, in proxy_method
result = self.session.send(self.service_name, method, *args)
File "/opt/openerp-7/openerp/addons/web/session.py", line 103, in send
raise xmlrpclib.Fault(openerp.tools.ustr(e), formatted_info)
Server Traceback (most recent call last):
File "/opt/openerp-7/openerp/addons/web/session.py", line 89, in send
return openerp.netsvc.dispatch_rpc(service_name, method, args)
File "/opt/openerp-7/openerp/netsvc.py", line 296, in dispatch_rpc
result = ExportService.getService(service_name).dispatch(method, params)
File "/opt/openerp-7/openerp/service/web_services.py", line 626, in dispatch
res = fn(db, uid, *params)
File "/opt/openerp-7/openerp/osv/osv.py", line 190, in execute_kw
return self.execute(db, uid, obj, method, *args, **kw or {})
File "/opt/openerp-7/openerp/osv/osv.py", line 132, in wrapper
return f(self, dbname, *args, **kwargs)
File "/opt/openerp-7/openerp/osv/osv.py", line 199, in execute
res = self.execute_cr(cr, uid, obj, method, *args, **kw)
File "/opt/openerp-7/openerp/osv/osv.py", line 187, in execute_cr
return getattr(object, method)(cr, uid, *args, **kw)
File "/opt/openerp-7/openerp/addons/base/module/module.py", line 426, in button_immediate_install
return self._button_immediate_function(cr, uid, ids, self.button_install, context=context)
File "/opt/openerp-7/openerp/addons/base/module/module.py", line 477, in _button_immediate_function
_, pool = pooler.restart_pool(cr.dbname, update_module=True)
File "/opt/openerp-7/openerp/pooler.py", line 39, in restart_pool
registry = RegistryManager.new(db_name, force_demo, status, update_module)
File "/opt/openerp-7/openerp/modules/registry.py", line 233, in new
openerp.modules.load_modules(registry.db, force_demo, status, update_module)
File "/opt/openerp-7/openerp/modules/loading.py", line 354, in load_modules
loaded_modules, update_module)
File "/opt/openerp-7/openerp/modules/loading.py", line 256, in load_marked_modules
loaded, processed = load_module_graph(cr, graph, progressdict, report=report, skip_modules=loaded_modules, perform_checks=perform_checks)
File "/opt/openerp-7/openerp/modules/loading.py", line 188, in load_module_graph
load_data(module_name, idref, mode)
File "/opt/openerp-7/openerp/modules/loading.py", line 76, in <lambda>
load_data = lambda *args: _load_data(cr, *args, kind='data')
File "/opt/openerp-7/openerp/modules/loading.py", line 124, in _load_data
tools.convert_xml_import(cr, module_name, fp, idref, mode, noupdate, report)
File "/opt/openerp-7/openerp/tools/convert.py", line 959, in convert_xml_import
obj.parse(doc.getroot())
File "/opt/openerp-7/openerp/tools/convert.py", line 852, in parse
self._tags[rec.tag](self.cr, rec, n)
File "/opt/openerp-7/openerp/tools/convert.py", line 812, in _tag_record
f_val = _eval_xml(self, field, self.pool, cr, self.uid, self.idref)
File "/opt/openerp-7/openerp/tools/convert.py", line 154, in _eval_xml
for n in node]), idref)
File "/opt/openerp-7/openerp/tools/convert.py", line 148, in _process
idref[id] = self.id_get(cr, id)
File "/opt/openerp-7/openerp/tools/convert.py", line 829, in id_get
res = self.model_id_get(cr, id_str)
File "/opt/openerp-7/openerp/tools/convert.py", line 838, in model_id_get
return model_data_obj.get_object_reference(cr, self.uid, mod, id_str)
File "/opt/openerp-7/openerp/tools/cache.py", line 18, in lookup
r = self.lookup(self2, cr, *args)
File "/opt/openerp-7/openerp/tools/cache.py", line 46, in lookup
value = d[key] = self.method(self2, cr, *args)
File "/opt/openerp-7/openerp/addons/base/ir/ir_model.py", line 876, in get_object_reference
data_id = self._get_id(cr, uid, module, xml_id)
File "/opt/openerp-7/openerp/tools/cache.py", line 18, in lookup
r = self.lookup(self2, cr, *args)
File "/opt/openerp-7/openerp/tools/cache.py", line 46, in lookup
value = d[key] = self.method(self2, cr, *args)
File "/opt/openerp-7/openerp/addons/base/ir/ir_model.py", line 869, in _get_id
raise ValueError('No such external ID currently defined in the system: %s.%s' % (module, xml_id))
ValueError: No such external ID currently defined in the system: jasper_server.load_jrxml_file_wizard_action
Can anyone help me understand what is happening and how to solve it?
One more thing: when I try to open the module jasper_server_wizard_sample I get an error too (on opening it, not installing it).
There is currently an open pull request to "install module without error about missing reference".
Maybe it's a bug and that PR fixes it.
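In the meantime, a hedged debugging sketch: search the module's data files for the external ID the loader could not resolve, to see where jasper_server.load_jrxml_file_wizard_action is referenced and whether the record that should define it is missing from the module's data list or loaded after its first use. The module path is a placeholder for wherever openerp-jasperserver is checked out:

import os

NEEDLE = 'load_jrxml_file_wizard_action'
MODULE_PATH = '/opt/openerp-7/addons/jasper_server'  # placeholder

# print every XML line in the module that mentions the missing external ID
for root, _dirs, files in os.walk(MODULE_PATH):
    for name in files:
        if name.endswith('.xml'):
            path = os.path.join(root, name)
            with open(path) as f:
                for lineno, line in enumerate(f, 1):
                    if NEEDLE in line:
                        print('%s:%d: %s' % (path, lineno, line.strip()))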
