With python3.7 it produces the same output; the traceback below is logged repeatedly, every few seconds:
Unhandled Error
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/commands/crawl.py", line 58, in run
    self.crawler_process.start()
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/crawler.py", line 293, in start
    reactor.run(installSignalHandlers=False) # blocking call
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1267, in run
    self.mainLoop()
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 1276, in mainLoop
    self.runUntilCurrent()
--- <exception caught here> ---
  File "/usr/local/lib/python3.7/dist-packages/twisted/internet/base.py", line 902, in runUntilCurrent
    call.func(*call.args, **call.kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/utils/reactor.py", line 41, in __call__
    return self._func(*self._a, **self._kw)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 135, in _next_request
    self.crawl(request, spider)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 210, in crawl
    self.schedule(request, spider)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/engine.py", line 216, in schedule
    if not self.slot.scheduler.enqueue_request(request):
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 57, in enqueue_request
    dqok = self._dqpush(request)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 86, in _dqpush
    self.dqs.push(reqd, -request.priority)
  File "/usr/lib/python3/dist-packages/queuelib/pqueue.py", line 33, in push
    self.queues[priority] = self.qfactory(priority)
  File "/usr/local/lib/python3.7/dist-packages/Scrapy-1.5.0-py3.7.egg/scrapy/core/scheduler.py", line 114, in _newdq
    return self.dqclass(join(self.dqdir, 'p%s' % priority))
  File "/usr/lib/python3/dist-packages/queuelib/queue.py", line 142, in __init__
    self.size, = struct.unpack(self.SIZE_FORMAT, qsize)
struct.error: unpack requires a buffer of 4 bytes
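
The struct.error at the bottom comes from queuelib reading a 4-byte size header when it reopens an existing on-disk queue file (queuelib/queue.py, __init__). A minimal sketch of that failure mode, assuming the cause is an empty or truncated per-priority queue file left behind in the crawl's job directory by an earlier interrupted run (the log alone does not confirm this):

import os
import struct
import tempfile

# Hypothetical leftover queue file: "p0" stands in for the file created as
# 'p%s' % priority in the traceback above. If such a file exists but holds
# fewer than 4 bytes, reading its size header fails exactly as logged.
SIZE_FORMAT = ">L"                                  # a 4-byte header format
path = os.path.join(tempfile.mkdtemp(), "p0")
open(path, "wb").close()                            # empty, i.e. truncated/corrupt

with open(path, "rb") as f:
    qsize = f.read(struct.calcsize(SIZE_FORMAT))    # returns b'' for an empty file
    try:
        size, = struct.unpack(SIZE_FORMAT, qsize)
    except struct.error as exc:
        print(exc)                                  # unpack requires a buffer of 4 bytes

If that is what is happening here, deleting the stale on-disk request queue under the spider's JOBDIR (typically JOBDIR/requests.queue) before restarting the crawl should clear the immediate error, though it would not explain how the file got corrupted in the first place.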
cannot import name 'etree' from 'lxml' (/usr/lib/python3/dist-packages/lxml/__init__.py)
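
The lxml import failure is a separate problem from the struct.error: it suggests the lxml package under /usr/lib/python3/dist-packages has no etree extension module built for Python 3.7 (an assumption; the log does not show which lxml build is installed). A quick check, run under the same interpreter Scrapy uses:

# run with python3.7; a working installation prints lxml's version tuple,
# a broken one raises the same ImportError seen in the log
from lxml import etree
print(etree.LXML_VERSION)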
2019-01-04 14:26:14 [scrapy.extensions.logstats] INFO: Crawled 0 pages (at 0 pages/min), scraped 0 items (at 0 items/min)
** Also affects: python-scrapy (Ubuntu)
Importance: Undecided
Status: New
** Also affects: python3.6 (Ubuntu)
Importance: Undecided
Status: New
** Also affects: python3.7 (Ubuntu)
Importance: Undecided
Status: New
--
https://bugs.launchpad.net/bugs/1810516
Title:
scrapy interrupt from python3-twisted