当前位置: 首页>>代码示例>>Python>>正文


Python Response.copy方法代码示例

本文整理汇总了Python中scrapy.http.Response.copy方法的典型用法代码示例。如果您正苦于以下问题:Python Response.copy方法的具体用法?Python Response.copy怎么用?Python Response.copy使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在scrapy.http.Response的用法示例。


在下文中一共展示了Response.copy方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: TestHttpErrorMiddlewareHandleAll

# 需要导入模块: from scrapy.http import Response [as 别名]
# 或者: from scrapy.http.Response import copy [as 别名]
class TestHttpErrorMiddlewareHandleAll(TestCase):
    """Tests for HttpErrorMiddleware with HTTPERROR_ALLOW_ALL enabled.

    With the setting on, every response status is let through, unless a
    per-request ``handle_httpstatus_list`` narrows the allowed codes.
    """

    def setUp(self):
        self.spider = BaseSpider("foo")
        self.mw = HttpErrorMiddleware(Settings({"HTTPERROR_ALLOW_ALL": True}))
        self.req = Request("http://scrapytest.org")

        # One successful and two error responses, all bound to the same request.
        self.res200 = Response("http://scrapytest.org", status=200)
        self.res200.request = self.req
        self.res404 = Response("http://scrapytest.org", status=404)
        self.res404.request = self.req
        self.res402 = Response("http://scrapytest.org", status=402)
        self.res402.request = self.req

    def test_process_spider_input(self):
        # With ALLOW_ALL, even an error status (404) passes through (returns None).
        # NOTE: assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(None, self.mw.process_spider_input(self.res200, self.spider))
        self.assertEqual(None, self.mw.process_spider_input(self.res404, self.spider))

    def test_meta_overrides_settings(self):
        # A request-level handle_httpstatus_list overrides the global
        # ALLOW_ALL setting: only 404 is whitelisted, so 402 must raise.
        request = Request("http://scrapytest.org", meta={"handle_httpstatus_list": [404]})
        res404 = self.res404.copy()
        res404.request = request
        res402 = self.res402.copy()
        res402.request = request

        self.assertEqual(None, self.mw.process_spider_input(res404, self.spider))
        self.assertRaises(HttpError, self.mw.process_spider_input, res402, self.spider)
开发者ID:rom232,项目名称:scrapy,代码行数:28,代码来源:test_spidermiddleware_httperror.py

示例2: TestHttpErrorMiddleware

# 需要导入模块: from scrapy.http import Response [as 别名]
# 或者: from scrapy.http.Response import copy [as 别名]
class TestHttpErrorMiddleware(TestCase):
    """Tests for HttpErrorMiddleware default behavior (no special settings).

    By default only 2xx responses pass; error statuses raise HttpError
    unless allowed via ``handle_httpstatus_list`` on the request or spider.
    """

    def setUp(self):
        self.spider = BaseSpider("foo")
        self.mw = HttpErrorMiddleware(Settings({}))
        self.req = Request("http://scrapytest.org")

        # A successful and a not-found response, both bound to the same request.
        self.res200 = Response("http://scrapytest.org", status=200)
        self.res200.request = self.req
        self.res404 = Response("http://scrapytest.org", status=404)
        self.res404.request = self.req

    def test_process_spider_input(self):
        # 200 is accepted (returns None); 404 raises HttpError.
        # NOTE: assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(None, self.mw.process_spider_input(self.res200, self.spider))
        self.assertRaises(HttpError, self.mw.process_spider_input, self.res404, self.spider)

    def test_process_spider_exception(self):
        # An HttpError is swallowed (empty result list); any other
        # exception is not handled by this middleware (returns None).
        self.assertEqual([], self.mw.process_spider_exception(self.res404, HttpError(self.res404), self.spider))
        self.assertEqual(None, self.mw.process_spider_exception(self.res404, Exception(), self.spider))

    def test_handle_httpstatus_list(self):
        # Allowed via request meta: 404 passes through.
        res = self.res404.copy()
        res.request = Request("http://scrapytest.org", meta={"handle_httpstatus_list": [404]})
        self.assertEqual(None, self.mw.process_spider_input(res, self.spider))

        # Allowed via a spider attribute: 404 passes through as well.
        self.spider.handle_httpstatus_list = [404]
        self.assertEqual(None, self.mw.process_spider_input(self.res404, self.spider))
开发者ID:rom232,项目名称:scrapy,代码行数:28,代码来源:test_spidermiddleware_httperror.py

示例3: TestHttpErrorMiddleware

# 需要导入模块: from scrapy.http import Response [as 别名]
# 或者: from scrapy.http.Response import copy [as 别名]
class TestHttpErrorMiddleware(TestCase):
    """Tests for an older HttpErrorMiddleware API.

    In this variant the middleware takes no settings, and filtering a
    response returns an empty list instead of raising HttpError.
    """

    def setUp(self):
        self.spider = BaseSpider()
        self.mw = HttpErrorMiddleware()
        self.req = Request('http://scrapytest.org')

        # A successful and a not-found response, both bound to the same request.
        self.res200 = Response('http://scrapytest.org', status=200)
        self.res200.request = self.req
        self.res404 = Response('http://scrapytest.org', status=404)
        self.res404.request = self.req

    def test_process_spider_input(self):
        # 200 is accepted (returns None).
        # NOTE: assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(self.mw.process_spider_input(self.res200, self.spider),
                         None)

        # 404 is filtered out: this API signals it with an empty list.
        self.assertEqual(self.mw.process_spider_input(self.res404, self.spider),
                         [])

    def test_handle_httpstatus_list(self):
        # Allowed via request meta: 404 passes through.
        res = self.res404.copy()
        res.request = Request('http://scrapytest.org',
                              meta={'handle_httpstatus_list': [404]})

        self.assertEqual(self.mw.process_spider_input(res, self.spider),
                         None)

        # Allowed via a spider attribute: 404 passes through as well.
        self.spider.handle_httpstatus_list = [404]
        self.assertEqual(self.mw.process_spider_input(self.res404, self.spider),
                         None)
开发者ID:serkanh,项目名称:scrapy,代码行数:32,代码来源:test_spidermiddleware_httperror.py

示例4: TestHttpErrorMiddlewareSettings

# 需要导入模块: from scrapy.http import Response [as 别名]
# 或者: from scrapy.http.Response import copy [as 别名]
class TestHttpErrorMiddlewareSettings(TestCase):
    """Similar test, but with settings.

    HTTPERROR_ALLOWED_CODES whitelists 402 globally; request-meta and
    spider-level ``handle_httpstatus_list`` override that whitelist.
    """

    def setUp(self):
        self.spider = Spider('foo')
        self.mw = HttpErrorMiddleware(Settings({'HTTPERROR_ALLOWED_CODES': (402,)}))
        self.req = Request('http://scrapytest.org')

        # One successful and two error responses, all bound to the same request.
        self.res200 = Response('http://scrapytest.org', status=200)
        self.res200.request = self.req
        self.res404 = Response('http://scrapytest.org', status=404)
        self.res404.request = self.req
        self.res402 = Response('http://scrapytest.org', status=402)
        self.res402.request = self.req

    def test_process_spider_input(self):
        # 200 always passes; 404 is not whitelisted and raises;
        # 402 is whitelisted by HTTPERROR_ALLOWED_CODES and passes.
        # NOTE: assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(None,
                self.mw.process_spider_input(self.res200, self.spider))
        self.assertRaises(HttpError,
                self.mw.process_spider_input, self.res404, self.spider)
        self.assertEqual(None,
                self.mw.process_spider_input(self.res402, self.spider))

    def test_meta_overrides_settings(self):
        # Request meta replaces the settings whitelist entirely:
        # 404 becomes allowed, while the previously-allowed 402 now raises.
        request = Request('http://scrapytest.org',
                              meta={'handle_httpstatus_list': [404]})
        res404 = self.res404.copy()
        res404.request = request
        res402 = self.res402.copy()
        res402.request = request

        self.assertEqual(None,
            self.mw.process_spider_input(res404, self.spider))
        self.assertRaises(HttpError,
                self.mw.process_spider_input, res402, self.spider)

    def test_spider_override_settings(self):
        # A spider-level list likewise replaces the settings whitelist.
        self.spider.handle_httpstatus_list = [404]
        self.assertEqual(None,
            self.mw.process_spider_input(self.res404, self.spider))
        self.assertRaises(HttpError,
                self.mw.process_spider_input, self.res402, self.spider)
开发者ID:dvska,项目名称:scrapy,代码行数:44,代码来源:test_spidermiddleware_httperror.py

示例5: test_get_cached_beautifulsoup

# 需要导入模块: from scrapy.http import Response [as 别名]
# 或者: from scrapy.http.Response import copy [as 别名]
    def test_get_cached_beautifulsoup(self):
        """Check that get_cached_beautifulsoup memoizes per response object,
        tolerates a response created without a body, and does not share its
        cache with a copied response."""
        response = Response('http://www.example.com', body='')
        first = get_cached_beautifulsoup(response)
        second = get_cached_beautifulsoup(response)
        assert isinstance(first, BeautifulSoup)
        assert isinstance(second, BeautifulSoup)
        # repeated calls on the same response must return the cached object
        assert first is second

        # a response built without a body has an empty-string body and
        # still produces a soup
        response = Response('http://www.example.com')
        assert response.body == ""
        assert isinstance(get_cached_beautifulsoup(response), BeautifulSoup)

        # a copied response is a distinct object, so it gets its own soup
        response = Response('http://www.example.com', body='')
        original_soup = get_cached_beautifulsoup(response)
        duplicate = response.copy()
        same_soup = get_cached_beautifulsoup(response)
        copy_soup = get_cached_beautifulsoup(duplicate)
        assert original_soup is same_soup
        assert original_soup is not copy_soup
开发者ID:kenzouyeh,项目名称:scrapy,代码行数:26,代码来源:test_utils_response.py


注:本文中的scrapy.http.Response.copy方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。