The spider was still scraping data normally yesterday, but this morning it stopped working and many fields now throw errors. Below is just one of them. I already changed a few things and fixed some of the errors, but this one remains. Could anyone help?
Traceback (most recent call last):
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\utils\defer.py", line 279, in iter_errback
yield next(it)
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\utils\python.py", line 350, in next
return next(self.data)
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\utils\python.py", line 350, in next
return next(self.data)
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\core\spidermw.py", line 106, in process_sync
for r in iterable:
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\spidermiddlewares\offsite.py", line 28, in
return (r for r in result or () if self._filter(r, spider))
File "C:\Users\80576.conda\envs\Network_spider\lib\site-packages\scrapy\core\spidermw.py", line 106, in process_sync
for r in iterable:
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\spidermiddlewares\referer.py", line 352, in
return (self._set_referer(r, response) for r in result or ())
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\core\spidermw.py", line 106, in process_sync
for r in iterable:
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\spidermiddlewares\urllength.py", line 27, in
return (r for r in result or () if self._filter(r, spider))
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\core\spidermw.py", line 106, in process_sync
for r in iterable:
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\spidermiddlewares\depth.py", line 31, in
return (r for r in result or () if self._filter(r, response, spider))
File "C:\Users\.conda\envs\Network_spider\lib\site-packages\scrapy\core\spidermw.py", line 106, in process_sync
for r in iterable:
File "C:\python\WeiBo_Topic\weibo-search\weibo\spiders\search.py", line 109, in parse
for weibo in self.parse_weibo(response):
File "C:\python\WeiBo_Topic\weibo-search\weibo\spiders\search.py", line 519, in parse_weibo
weibo['retweet_id'] = retweet['id']
File "C:\Users\80576.conda\envs\Network_spider\lib\site-packages\scrapy\item.py", line 79, in getitem
return self._values[key]
KeyError: 'id'
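For context, the last two frames say the crash happens at `weibo['retweet_id'] = retweet['id']` in parse_weibo: the `retweet` item was built without its `id` field ever being assigned (typically because the page markup changed and the selector matched nothing), and scrapy.Item raises KeyError for unset fields. A minimal standalone sketch of that behavior and a defensive guard; WeiboItem and its fields here are assumptions modeled on the traceback, not the actual spider code:

import scrapy

class WeiboItem(scrapy.Item):
    # hypothetical fields, named after the ones visible in the traceback
    id = scrapy.Field()
    retweet_id = scrapy.Field()

retweet = WeiboItem()   # 'id' never assigned, e.g. the selector found nothing
weibo = WeiboItem()

# retweet['id'] would raise KeyError: 'id' -- the same error as in the traceback.
# Guarding the assignment keeps one missing field from aborting the whole parse:
if 'id' in retweet:
    weibo['retweet_id'] = retweet['id']
else:
    print('retweet item has no id; skipping retweet_id')

Whether skipping the field (as above) or re-checking the selector that fills `retweet['id']` is the right fix depends on why the field is missing in the first place.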