本文整理汇总了Python中pages.link_crawler.LinkCrawler.verify_status_code_is_ok方法的典型用法代码示例。如果您正苦于以下问题:Python LinkCrawler.verify_status_code_is_ok方法的具体用法?Python LinkCrawler.verify_status_code_is_ok怎么用?Python LinkCrawler.verify_status_code_is_ok使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pages.link_crawler.LinkCrawler的用法示例。
在下文中一共展示了LinkCrawler.verify_status_code_is_ok方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_that_links_in_the_faq_page_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_the_faq_page_return_200_code(self, mozwebqa):
    """Crawl every link inside the FAQ page's #wrapper and verify each responds OK."""
    crawler = LinkCrawler(mozwebqa)
    urls = crawler.collect_links("/faq/", id="wrapper")
    Assert.greater(len(urls), 0, "The link crawler did not find any urls to crawl")
    # verify_status_code_is_ok returns True on success, otherwise a description
    # of the failing link; collect the failures for the final assertion.
    outcomes = [crawler.verify_status_code_is_ok(url) for url in urls]
    bad_urls = [outcome for outcome in outcomes if outcome is not True]
    Assert.equal(0, len(bad_urls), "%s bad links found. " % len(bad_urls) + ", ".join(bad_urls))
示例2: test_that_links_in_footer_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_footer_return_200_code(self, base_url):
    """Check that every link in the home page footer returns a 200 response."""
    crawler = LinkCrawler(base_url)
    urls = crawler.collect_links("/", name="footer")
    assert len(urls) > 0
    # A True result means the link was OK; anything else describes the failure.
    checks = (crawler.verify_status_code_is_ok(url) for url in urls)
    bad_urls = [result for result in checks if result is not True]
    assert 0 == len(bad_urls), u"%s bad links found. " % len(bad_urls) + ", ".join(bad_urls)
示例3: test_that_links_in_the_about_page_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_the_about_page_return_200_code(self, base_url):
    """Verify each link inside the about page's #main element responds with 200."""
    crawler = LinkCrawler(base_url)
    urls = crawler.collect_links('/about', id='main')
    assert len(urls) > 0
    bad_urls = []
    for link in urls:
        # verify_status_code_is_ok yields True on success, a failure string otherwise
        outcome = crawler.verify_status_code_is_ok(link)
        if outcome is not True:
            bad_urls.append(outcome)
    assert 0 == len(bad_urls), u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls)
示例4: test_that_links_in_the_about_page_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_the_about_page_return_200_code(self, mozwebqa):
    """Crawl the about page's #main element and assert all links are reachable."""
    crawler = LinkCrawler(mozwebqa)
    urls = crawler.collect_links("/about", id="main")
    Assert.greater(len(urls), 0, u"something went wrong. no links found.")
    # Keep only the non-True outcomes: each one describes a broken link.
    bad_urls = [
        result
        for result in (crawler.verify_status_code_is_ok(url) for url in urls)
        if result is not True
    ]
    Assert.equal(0, len(bad_urls), u"%s bad links found. " % len(bad_urls) + ", ".join(bad_urls))
示例5: test_home_page_links
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_home_page_links(self, mozwebqa):
    """Verify every link inside the home page's #content element responds OK."""
    crawler = LinkCrawler(mozwebqa)
    urls = crawler.collect_links('/', id='content')
    Assert.greater(len(urls), 0, u'Something went wrong. No links found.')
    bad_urls = []
    for link in urls:
        # True means the link checked out; anything else is a failure message.
        status = crawler.verify_status_code_is_ok(link)
        if status is not True:
            bad_urls.append(status)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
示例6: test_that_links_in_the_about_page_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_the_about_page_return_200_code(self, base_url):
    """Collect links from the about page's #main element and flag any that fail."""
    crawler = LinkCrawler(base_url)
    urls = crawler.collect_links('/about', id='main')
    Assert.greater(
        len(urls), 0, u'something went wrong. no links found.')
    # verify_status_code_is_ok returns True for a healthy link, otherwise
    # a string describing the problem; gather the problem strings.
    outcomes = [crawler.verify_status_code_is_ok(link) for link in urls]
    bad_urls = [outcome for outcome in outcomes if outcome is not True]
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
示例7: test_that_links_in_footer_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_footer_return_200_code(self, mozwebqa):
    """Check that every footer link on the home page is reachable (status OK)."""
    crawler = LinkCrawler(mozwebqa)
    urls = crawler.collect_links('/', name='footer')
    Assert.greater(
        len(urls), 0, u'something went wrong. no links found.')
    bad_urls = []
    for target in urls:
        # Non-True results describe broken links and are reported at the end.
        verdict = crawler.verify_status_code_is_ok(target)
        if verdict is not True:
            bad_urls.append(verdict)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
示例8: test_community_page_links
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_community_page_links(self, mozwebqa):
    """Verify links in the community page's activity stream return 200.

    irc:// and mailto: links are skipped because they are not HTTP
    resources and cannot be status-checked.
    """
    crawler = LinkCrawler(mozwebqa)
    urls = crawler.collect_links('/community', id='activity-stream')
    bad_urls = []
    Assert.greater(
        len(urls), 0, u'something went wrong. no links found.')
    for url in urls:
        # BUG FIX: the original condition was
        #   `if not 'irc://irc.mozilla.org' and not 'mailto:' in url:`
        # `not 'irc://irc.mozilla.org'` is always False (a non-empty string
        # literal is truthy), so the whole condition was always False and
        # NO link was ever checked. The intended check is a pair of
        # membership tests on `url`.
        if 'irc://irc.mozilla.org' not in url and 'mailto:' not in url:
            check_result = crawler.verify_status_code_is_ok(url)
            if check_result is not True:
                bad_urls.append(check_result)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
示例9: test_that_links_in_the_labs_page_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_the_labs_page_return_200_code(self, mozwebqa):
    """Crawl the labs page's #wrapper element and assert all links respond OK."""
    crawler = LinkCrawler(mozwebqa)
    urls = crawler.collect_links('/labs/', id='wrapper')
    Assert.greater(
        len(urls), 0,
        'The link crawler did not find any urls to crawl')
    # Any result other than True is a description of a broken link.
    bad_urls = [
        result
        for result in (crawler.verify_status_code_is_ok(url) for url in urls)
        if result is not True
    ]
    Assert.equal(
        0, len(bad_urls),
        '%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
示例10: test_that_links_in_the_services_page_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_the_services_page_return_200_code(self, base_url, selenium, vouched_user):
    """Log in, open the developer settings section, and verify its service URLs.

    Requires a vouched user because the services list is only visible
    after authentication.
    """
    home_page = Home(base_url, selenium)
    home_page.login(vouched_user['email'], vouched_user['password'])
    settings = home_page.header.click_settings_menu_item()
    developer = settings.developer
    crawler = LinkCrawler(base_url)
    urls = developer.get_services_urls()
    assert len(urls) > 0
    # True means the URL is healthy; anything else is a failure description.
    results = (crawler.verify_status_code_is_ok(service_url) for service_url in urls)
    bad_urls = [outcome for outcome in results if outcome is not True]
    assert 0 == len(bad_urls), u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls)
示例11: test_that_links_in_the_services_page_return_200_code
# 需要导入模块: from pages.link_crawler import LinkCrawler [as 别名]
# 或者: from pages.link_crawler.LinkCrawler import verify_status_code_is_ok [as 别名]
def test_that_links_in_the_services_page_return_200_code(self, mozwebqa):
    """Log in, open the edit-profile page, and verify its service URLs respond OK."""
    home_page = Home(mozwebqa)
    home_page.login()
    edit_profile_page = home_page.header.click_edit_profile_menu_item()
    crawler = LinkCrawler(mozwebqa)
    urls = edit_profile_page.get_services_urls()
    Assert.greater(
        len(urls), 0, u'something went wrong. no links found.')
    bad_urls = []
    for service_url in urls:
        # Collect the non-True outcomes; each describes a broken service link.
        outcome = crawler.verify_status_code_is_ok(service_url)
        if outcome is not True:
            bad_urls.append(outcome)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))