This article collects typical usage examples of the Response.meta['exception'] pattern from Python's scrapy.http module. If you have been wondering what Response.meta['exception'] is, how to use it, or what it looks like in real code, the example selected below may help. You can also read further about the enclosing class, scrapy.http.Response.
One code example using Response.meta['exception'] is shown below.
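Before the example, a brief note on what Response.meta['exception'] actually refers to: Response.meta is simply a shortcut to the meta dict of the Request that produced the response, and 'exception' is an application-defined key rather than a built-in Scrapy attribute. The short sketch below is not taken from the example project; the URL and marker value are made up purely for illustration.

# Minimal sketch: Response.meta forwards to Response.request.meta,
# so 'exception' is just a user-defined key stored on the Request.
from scrapy.http import Request, Response

req = Request('http://example.com/404')              # hypothetical URL
req.meta['exception'] = 'download timed out'         # hypothetical marker value
resp = Response('http://example.com/404', status=404, request=req)
print(resp.meta['exception'])                        # -> 'download timed out'
print(resp.meta is req.meta)                         # -> True, same dict object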
Example 1: _test_404_middleware
# Required imports: from scrapy.http import Request, Response
# The attribute under test is Response.meta['exception']
def _test_404_middleware(self):
    from twcrawler.middleware.handle_404 import Handle404

    mw = Handle404.from_crawler(self.crawler)
    url = 'http://example.com/404'
    req = Request(url)
    req.meta['origin_url'] = url
    req.meta['proxy'] = 'xx.xx.xx.xx:404'

    # A 404 response whose body contains the configured 404 markers
    # should produce no output.
    for string_404_list in mw.settings.get('HTML_404_STRING'):
        body_normal_404 = '\n'.join(['<p>%s</p>' % s for s in string_404_list])
        resp = Response(url, body=body_normal_404, status=404, request=req)
        ret = list(mw.process_spider_output(resp, [], self.spider))
        assert not ret

    # A 404 response without the known markers.
    resp = Response(url, body='bad_string', status=404, request=req)
    ret = list(mw.process_spider_output(resp, [], self.spider))
    # TODO: after the new request is pushed to redis, no item is returned
    # self.assertEqual(ret[0].url, url)

    # A normal 200 response: items pass through untouched.
    resp = Response(url, body='bad_string', status=200, request=req)
    from scrapy import Item, Field

    class TestItem(Item):
        uid = Field()

    item = TestItem()
    item['uid'] = 'uid_test'
    ret = list(mw.process_spider_output(resp, [item], self.spider))
    self.assertEqual(item, ret[0])

    # A response flagged with an exception marker in meta.
    resp.meta['exception'] = 'test exception'
    ret = list(mw.process_spider_output(resp, [resp], self.spider))
    # TODO: after the new request is pushed to redis, no item is returned
    # self.assertEqual(ret[0].url, url)

    # Other error / empty statuses.
    for status in [503, 204, 500]:
        resp = Response(url, body='bad_string', status=status, request=req)
        ret = list(mw.process_spider_output(resp, [resp], self.spider))
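The Handle404 middleware itself is not shown in this article, so for context here is a rough, hypothetical sketch of the behaviour the test above appears to expect: responses whose 404 body matches the configured markers, and responses flagged with meta['exception'], are swallowed (presumably re-queued, e.g. to redis, as the TODO comments hint), while everything else is passed through. The class and setting names mirror the test, but the implementation below is an assumption, not the project's actual code.

# Hypothetical reconstruction -- NOT the actual twcrawler.middleware.handle_404.Handle404.
class Handle404:
    def __init__(self, settings):
        self.settings = settings

    @classmethod
    def from_crawler(cls, crawler):
        # standard Scrapy spider-middleware constructor hook
        return cls(crawler.settings)

    def _body_looks_like_404(self, response):
        body = response.body
        if isinstance(body, bytes):
            body = body.decode('utf-8', 'ignore')
        # HTML_404_STRING is assumed to be a list of string groups; a page is
        # treated as a "soft 404" if every string of some group appears in it.
        for group in self.settings.get('HTML_404_STRING', []):
            if all(s in body for s in group):
                return True
        return False

    def process_spider_output(self, response, result, spider):
        if response.status == 404 and self._body_looks_like_404(response):
            return  # drop output; a real implementation would re-queue the request
        if 'exception' in response.meta:
            return  # same for responses flagged with an exception marker
        for item_or_request in result:
            yield item_or_request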