This page collects typical code examples of the Python method tests.mockserver.MockServer.url. If you have been wondering what exactly MockServer.url does, or how to use it in practice, the curated examples below may help. You can also read more about its containing class, tests.mockserver.MockServer.
Four code examples of the MockServer.url method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
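To set the stage, here is a minimal sketch of the pattern all four examples share: MockServer is used as a context manager (the test cases below call __enter__ and __exit__ explicitly from setUp/tearDown), and url() builds an absolute URL for one of the mock server's endpoints. The /status endpoint and the is_secure parameter are borrowed from the examples below; the sketch assumes __enter__ returns the started server instance, as the setUp/tearDown calls suggest.

from tests.mockserver import MockServer

# Minimal usage sketch, assuming MockServer.__enter__ returns the
# started server instance.
with MockServer() as mockserver:
    # url() returns an absolute URL pointing at the mock server
    url = mockserver.url("/status?n=200")
    # is_secure=True requests the HTTPS variant (see Example 4)
    secure_url = mockserver.url("/status?n=200", is_secure=True)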
Example 1: Http11MockServerTestCase
# Required import: from tests.mockserver import MockServer [as alias]
# Or: from tests.mockserver.MockServer import url [as alias]
class Http11MockServerTestCase(unittest.TestCase):
    """HTTP 1.1 test case with MockServer"""

    def setUp(self):
        self.mockserver = MockServer()
        self.mockserver.__enter__()

    def tearDown(self):
        self.mockserver.__exit__(None, None, None)

    @defer.inlineCallbacks
    def test_download_with_content_length(self):
        crawler = get_crawler(SingleRequestSpider)
        # http://localhost:8998/partial sets Content-Length to 1024;
        # use download_maxsize=1000 to avoid downloading it
        yield crawler.crawl(seed=Request(url=self.mockserver.url('/partial'),
                                         meta={'download_maxsize': 1000}))
        failure = crawler.spider.meta['failure']
        self.assertIsInstance(failure.value, defer.CancelledError)

    @defer.inlineCallbacks
    def test_download(self):
        crawler = get_crawler(SingleRequestSpider)
        yield crawler.crawl(seed=Request(url=self.mockserver.url('')))
        failure = crawler.spider.meta.get('failure')
        self.assertIsNone(failure)
        reason = crawler.spider.meta['close_reason']
        self.assertEqual(reason, 'finished')

    @defer.inlineCallbacks
    def test_download_gzip_response(self):
        crawler = get_crawler(SingleRequestSpider)
        body = b'1' * 100  # PayloadResource requires the body length to be 100
        request = Request(self.mockserver.url('/payload'), method='POST',
                          body=body, meta={'download_maxsize': 50})
        yield crawler.crawl(seed=request)
        failure = crawler.spider.meta['failure']
        # download_maxsize < 100, hence the CancelledError
        self.assertIsInstance(failure.value, defer.CancelledError)

        if six.PY2:
            request.headers.setdefault(b'Accept-Encoding', b'gzip,deflate')
            request = request.replace(url=self.mockserver.url('/xpayload'))
            yield crawler.crawl(seed=request)
            # download_maxsize = 50 is enough for the gzipped response
            failure = crawler.spider.meta.get('failure')
            self.assertIsNone(failure)
            reason = crawler.spider.meta['close_reason']
            self.assertEqual(reason, 'finished')
        else:
            # See issue https://twistedmatrix.com/trac/ticket/8175
            raise unittest.SkipTest("xpayload only enabled for PY2")
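A quick way to see why download_maxsize = 50 is enough for the gzipped response in the test above: the 100-byte body is a run of identical bytes and compresses to well under 50 bytes. A standalone sketch (using Python 3's gzip.compress for brevity):

import gzip

# The same body the test POSTs to the payload resource.
body = b'1' * 100
compressed = gzip.compress(body)
# A run of identical bytes gzips to roughly 20-30 bytes,
# comfortably below the download_maxsize of 50.
print(len(body), len(compressed))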
Example 2: CrawlTestCase
# Required import: from tests.mockserver import MockServer [as alias]
# Or: from tests.mockserver.MockServer import url [as alias]
class CrawlTestCase(TestCase):

    def setUp(self):
        self.mockserver = MockServer()
        self.mockserver.__enter__()
        self.runner = CrawlerRunner()

    def tearDown(self):
        self.mockserver.__exit__(None, None, None)

    @defer.inlineCallbacks
    def test_follow_all(self):
        crawler = self.runner.create_crawler(FollowAllSpider)
        yield crawler.crawl(mockserver=self.mockserver)
        self.assertEqual(len(crawler.spider.urls_visited), 11)  # 10 + start_url

    @defer.inlineCallbacks
    def test_delay(self):
        # short to long delays
        yield self._test_delay(0.2, False)
        yield self._test_delay(1, False)
        # randomized delays
        yield self._test_delay(0.2, True)
        yield self._test_delay(1, True)

    @defer.inlineCallbacks
    def _test_delay(self, delay, randomize):
        settings = {"DOWNLOAD_DELAY": delay, 'RANDOMIZE_DOWNLOAD_DELAY': randomize}
        crawler = CrawlerRunner(settings).create_crawler(FollowAllSpider)
        yield crawler.crawl(maxlatency=delay * 2, mockserver=self.mockserver)
        t = crawler.spider.times
        totaltime = t[-1] - t[0]
        avgd = totaltime / (len(t) - 1)
        tolerance = 0.6 if randomize else 0.2
        self.assertTrue(avgd > delay * (1 - tolerance),
                        "download delay too small: %s" % avgd)

    @defer.inlineCallbacks
    def test_timeout_success(self):
        crawler = self.runner.create_crawler(DelaySpider)
        yield crawler.crawl(n=0.5, mockserver=self.mockserver)
        self.assertTrue(crawler.spider.t1 > 0)
        self.assertTrue(crawler.spider.t2 > 0)
        self.assertTrue(crawler.spider.t2 > crawler.spider.t1)

    @defer.inlineCallbacks
    def test_timeout_failure(self):
        crawler = CrawlerRunner({"DOWNLOAD_TIMEOUT": 0.35}).create_crawler(DelaySpider)
        yield crawler.crawl(n=0.5, mockserver=self.mockserver)
        self.assertTrue(crawler.spider.t1 > 0)
        self.assertEqual(crawler.spider.t2, 0)
        self.assertTrue(crawler.spider.t2_err > 0)
        self.assertTrue(crawler.spider.t2_err > crawler.spider.t1)
        # server hangs after receiving the response headers
        yield crawler.crawl(n=0.5, b=1, mockserver=self.mockserver)
        self.assertTrue(crawler.spider.t1 > 0)
        self.assertEqual(crawler.spider.t2, 0)
        self.assertTrue(crawler.spider.t2_err > 0)
        self.assertTrue(crawler.spider.t2_err > crawler.spider.t1)

    @defer.inlineCallbacks
    def test_retry_503(self):
        crawler = self.runner.create_crawler(SimpleSpider)
        with LogCapture() as l:
            yield crawler.crawl(self.mockserver.url("/status?n=503"), mockserver=self.mockserver)
        self._assert_retried(l)

    @defer.inlineCallbacks
    def test_retry_conn_failed(self):
        crawler = self.runner.create_crawler(SimpleSpider)
        with LogCapture() as l:
            yield crawler.crawl("http://localhost:65432/status?n=503", mockserver=self.mockserver)
        self._assert_retried(l)

    @defer.inlineCallbacks
    def test_retry_dns_error(self):
        crawler = self.runner.create_crawler(SimpleSpider)
        with LogCapture() as l:
            # try to fetch the homepage of a non-existent domain
            yield crawler.crawl("http://dns.resolution.invalid./", mockserver=self.mockserver)
        self._assert_retried(l)

    @defer.inlineCallbacks
    def test_start_requests_bug_before_yield(self):
        with LogCapture('scrapy', level=logging.ERROR) as l:
            crawler = self.runner.create_crawler(BrokenStartRequestsSpider)
            yield crawler.crawl(fail_before_yield=1, mockserver=self.mockserver)
        self.assertEqual(len(l.records), 1)
        record = l.records[0]
        self.assertIsNotNone(record.exc_info)
        self.assertIs(record.exc_info[0], ZeroDivisionError)

    @defer.inlineCallbacks
    def test_start_requests_bug_yielding(self):
        with LogCapture('scrapy', level=logging.ERROR) as l:
            crawler = self.runner.create_crawler(BrokenStartRequestsSpider)
            yield crawler.crawl(fail_yielding=1, mockserver=self.mockserver)
        self.assertEqual(len(l.records), 1)

    # ... part of the code omitted here ...
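A side note on the tolerance values in _test_delay above: with RANDOMIZE_DOWNLOAD_DELAY enabled, Scrapy waits a random interval between 0.5 * DOWNLOAD_DELAY and 1.5 * DOWNLOAD_DELAY between requests, which is why the randomized runs get the looser 0.6 tolerance. A self-contained sketch of the average-delay check, with hypothetical timestamps standing in for crawler.spider.times:

# Hypothetical request timestamps, standing in for crawler.spider.times.
times = [0.0, 1.1, 1.9, 3.2, 4.0]

delay, randomize = 1, True
tolerance = 0.6 if randomize else 0.2

# Average gap between consecutive requests: total elapsed time
# divided by the number of gaps.
avgd = (times[-1] - times[0]) / (len(times) - 1)
assert avgd > delay * (1 - tolerance), "download delay too small: %s" % avgd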
Example 3: FileDownloadCrawlTestCase
# Required import: from tests.mockserver import MockServer [as alias]
# Or: from tests.mockserver.MockServer import url [as alias]
class FileDownloadCrawlTestCase(TestCase):
    pipeline_class = 'scrapy.pipelines.files.FilesPipeline'
    store_setting_key = 'FILES_STORE'
    media_key = 'files'
    media_urls_key = 'file_urls'
    expected_checksums = {
        '5547178b89448faf0015a13f904c936e',
        'c2281c83670e31d8aaab7cb642b824db',
        'ed3f6538dc15d4d9179dae57319edc5f'}

    def setUp(self):
        self.mockserver = MockServer()
        self.mockserver.__enter__()
        # prepare a directory for storing files
        self.tmpmediastore = self.mktemp()
        os.mkdir(self.tmpmediastore)
        self.settings = {
            'ITEM_PIPELINES': {self.pipeline_class: 1},
            self.store_setting_key: self.tmpmediastore,
        }
        self.runner = CrawlerRunner(self.settings)
        self.items = []

    def tearDown(self):
        shutil.rmtree(self.tmpmediastore)
        self.items = []
        self.mockserver.__exit__(None, None, None)

    def _on_item_scraped(self, item):
        self.items.append(item)

    def _create_crawler(self, spider_class, **kwargs):
        crawler = self.runner.create_crawler(spider_class, **kwargs)
        crawler.signals.connect(self._on_item_scraped, signals.item_scraped)
        return crawler

    def _assert_files_downloaded(self, items, logs):
        self.assertEqual(len(items), 1)
        self.assertIn(self.media_key, items[0])

        # check that the logs show the expected number of successful file downloads
        file_dl_success = 'File (downloaded): Downloaded file from'
        self.assertEqual(logs.count(file_dl_success), 3)

        # check that the images/files checksums are what we know they should be
        if self.expected_checksums is not None:
            checksums = {
                i['checksum']
                for item in items
                for i in item[self.media_key]}
            self.assertEqual(checksums, self.expected_checksums)

        # check that the image files were actually written to the media store
        for item in items:
            for i in item[self.media_key]:
                self.assertTrue(
                    os.path.exists(
                        os.path.join(self.tmpmediastore, i['path'])))

    def _assert_files_download_failure(self, crawler, items, code, logs):
        # check that the item does NOT have the "images/files" field populated
        self.assertEqual(len(items), 1)
        self.assertIn(self.media_key, items[0])
        self.assertFalse(items[0][self.media_key])

        # check that there was 1 successful fetch and 3 other responses with a non-200 code
        self.assertEqual(crawler.stats.get_value('downloader/request_method_count/GET'), 4)
        self.assertEqual(crawler.stats.get_value('downloader/response_count'), 4)
        self.assertEqual(crawler.stats.get_value('downloader/response_status_count/200'), 1)
        self.assertEqual(crawler.stats.get_value('downloader/response_status_count/%d' % code), 3)

        # check that the logs do show the failure on the file downloads
        file_dl_failure = 'File (code: %d): Error downloading file from' % code
        self.assertEqual(logs.count(file_dl_failure), 3)

        # check that no files were written to the media store
        self.assertEqual(os.listdir(self.tmpmediastore), [])

    @defer.inlineCallbacks
    def test_download_media(self):
        crawler = self._create_crawler(MediaDownloadSpider)
        with LogCapture() as log:
            yield crawler.crawl(self.mockserver.url("/files/images/"),
                                media_key=self.media_key,
                                media_urls_key=self.media_urls_key)
        self._assert_files_downloaded(self.items, str(log))

    @defer.inlineCallbacks
    def test_download_media_wrong_urls(self):
        crawler = self._create_crawler(BrokenLinksMediaDownloadSpider)
        with LogCapture() as log:
            yield crawler.crawl(self.mockserver.url("/files/images/"),
                                media_key=self.media_key,
                                media_urls_key=self.media_urls_key)
        self._assert_files_download_failure(crawler, self.items, 404, str(log))

    @defer.inlineCallbacks
    def test_download_media_redirected_default_failure(self):
        # ... part of the code omitted here ...
Example 4: ProxyConnectTestCase
# Required import: from tests.mockserver import MockServer [as alias]
# Or: from tests.mockserver.MockServer import url [as alias]
class ProxyConnectTestCase(TestCase):

    def setUp(self):
        self.mockserver = MockServer()
        self.mockserver.__enter__()
        self._oldenv = os.environ.copy()
        self._proxy = HTTPSProxy()
        self._proxy.start()
        # Wait for the proxy to start.
        time.sleep(1.0)
        os.environ['https_proxy'] = self._proxy.http_address()
        os.environ['http_proxy'] = self._proxy.http_address()

    def tearDown(self):
        self.mockserver.__exit__(None, None, None)
        self._proxy.shutdown()
        os.environ = self._oldenv

    @defer.inlineCallbacks
    def test_https_connect_tunnel(self):
        crawler = get_crawler(SimpleSpider)
        with LogCapture() as l:
            yield crawler.crawl(self.mockserver.url("/status?n=200", is_secure=True))
        self._assert_got_response_code(200, l)

    @defer.inlineCallbacks
    def test_https_noconnect(self):
        proxy = os.environ['https_proxy']
        os.environ['https_proxy'] = proxy + '?noconnect'
        crawler = get_crawler(SimpleSpider)
        with LogCapture() as l:
            yield crawler.crawl(self.mockserver.url("/status?n=200", is_secure=True))
        self._assert_got_response_code(200, l)

    @defer.inlineCallbacks
    def test_https_connect_tunnel_error(self):
        crawler = get_crawler(SimpleSpider)
        with LogCapture() as l:
            yield crawler.crawl("https://localhost:99999/status?n=200")
        self._assert_got_tunnel_error(l)

    @defer.inlineCallbacks
    def test_https_tunnel_auth_error(self):
        os.environ['https_proxy'] = _wrong_credentials(os.environ['https_proxy'])
        crawler = get_crawler(SimpleSpider)
        with LogCapture() as l:
            yield crawler.crawl(self.mockserver.url("/status?n=200", is_secure=True))
        # The proxy returns a 407 error code, but it does not reach the client;
        # the client just sees a TunnelError.
        self._assert_got_tunnel_error(l)

    @defer.inlineCallbacks
    def test_https_tunnel_without_leak_proxy_authorization_header(self):
        request = Request(self.mockserver.url("/echo", is_secure=True))
        crawler = get_crawler(SingleRequestSpider)
        with LogCapture() as l:
            yield crawler.crawl(seed=request)
        self._assert_got_response_code(200, l)
        echo = json.loads(crawler.spider.meta['responses'][0].body)
        self.assertNotIn('Proxy-Authorization', echo['headers'])

    @defer.inlineCallbacks
    def test_https_noconnect_auth_error(self):
        os.environ['https_proxy'] = _wrong_credentials(os.environ['https_proxy']) + '?noconnect'
        crawler = get_crawler(SimpleSpider)
        with LogCapture() as l:
            yield crawler.crawl(self.mockserver.url("/status?n=200", is_secure=True))
        self._assert_got_response_code(407, l)

    def _assert_got_response_code(self, code, log):
        print(log)
        self.assertEqual(str(log).count('Crawled (%d)' % code), 1)

    def _assert_got_tunnel_error(self, log):
        print(log)
        self.assertIn('TunnelError', str(log))
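The _wrong_credentials helper used in the auth-error tests above is defined in a part of the test module not shown on this page. As a purely hypothetical sketch (not the actual implementation), it presumably swaps the userinfo portion of the proxy URL for invalid credentials:

from urllib.parse import urlsplit, urlunsplit

# Hypothetical sketch only -- the real helper lives in the omitted part
# of the test module. It replaces any user:password in a proxy URL with
# deliberately wrong credentials.
def _wrong_credentials(proxy_url):
    parts = urlsplit(proxy_url)
    host_port = parts.netloc.rsplit('@', 1)[-1]  # strip existing userinfo, if any
    netloc = 'wrong:credentials@' + host_port
    return urlunsplit((parts.scheme, netloc, parts.path, parts.query, parts.fragment))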