This article collects typical usage examples of the Spider.crawl method from the Python spider module. If you have been wondering what Spider.crawl does, how to call it, or what real-world uses look like, the curated examples here should help. You can also explore further examples of the enclosing class, spider.Spider.
Three code examples of Spider.crawl are shown below, sorted by popularity by default.
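All three examples depend on a project-local spider module that is not reproduced on this page. Purely as orientation, here is a minimal sketch of the interface the examples appear to rely on; the class body is a hypothetical stand-in, not the actual implementation:

# Hypothetical sketch of the spider.Spider interface assumed below.
# Example 1 iterates over crawl() as a generator of post folders;
# Examples 2 and 3 call crawl() for its side effects after seeding
# the spider with a user list or explicit requests.
class Spider:
    def __init__(self, source):
        # 'source' is a directory (Example 1), a user list (Example 2),
        # or a crawl category such as 'news' (Example 3).
        self.source = source
        self.requests = []

    def add_request(self, request):
        self.requests.append(request)

    def crawl(self, *callbacks):
        # Walk the source, fetch items, and yield or process them.
        ...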
Example 1: log_it
# Required import: from spider import Spider [as alias]
# Or: from spider.Spider import crawl [as alias]
import os
import shutil
import json
import time
import distutils.dir_util

log_it("TEMP DIR", temp_dire)
if os.path.exists(temp_dire):
    shutil.rmtree(temp_dire)
distutils.dir_util.copy_tree(src_dir, temp_dire)

log_it("LOG", "Crawling started")
spider = Spider(temp_dire)

log_it("LOG", "Compiling pages started")
posts_data = []
# crawl() is lazy: each iteration yields the folder of one discovered post.
for post_folder in spider.crawl():
    config = json.load(open(os.path.join(post_folder, "__pub.lish")))
    t_date = time.strptime(config['date'], "%Y-%m-%d")
    posts_data.append({
        'title': config['name'].replace('-', ' '),
        'url':   post_folder[len(temp_dire) + 1:],  # path relative to temp_dire
        'year':  time.strftime("%Y", t_date),
        'day':   time.strftime("%d", t_date),
        'month': time.strftime("%b", t_date),
        'date':  t_date,
    })
    compiler = Compilers[config['type']]
    owd = os.getcwd()
    os.chdir(post_folder)
    compiler.compile(config['file'])
    os.chdir(owd)
log_it("LOG", "Crawling done")
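The snippet also leans on names defined elsewhere in its project: the paths src_dir and temp_dire, a log_it helper, and a Compilers registry mapping a post's type field to a compiler object. A minimal, purely hypothetical stand-in that is enough to exercise the example:

# Hypothetical stand-ins for the names Example 1 assumes.
import os

src_dir = "posts"         # source tree holding the raw posts
temp_dire = "_build_tmp"  # scratch copy that the spider walks

def log_it(tag, message):
    # Minimal logger; the real project may format messages differently.
    print("[%s] %s" % (tag, message))

class MarkdownCompiler:
    def compile(self, filename):
        # Placeholder: a real compiler would render 'filename' to HTML.
        print("rendering", os.path.abspath(filename))

# Maps a post's 'type' (from its __pub.lish config) to a compiler.
Compilers = {'md': MarkdownCompiler()}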
Example 2: startCrawl
# Required import: from spider import Spider [as alias]
# Or: from spider.Spider import crawl [as alias]
def startCrawl(self):
    spider = Spider(self.userList)
    spider.crawl(self.hasProcessed)
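Here crawl() receives a callback; the names suggest hasProcessed tells the spider which users were already handled. A hypothetical enclosing class showing how this method might be wired up (userList, the _seen set, and the callback semantics are all assumptions):

# Hypothetical enclosing class for the startCrawl example.
from spider import Spider

class UserCrawler:
    def __init__(self, userList):
        self.userList = userList
        self._seen = set()

    def hasProcessed(self, user):
        # Callback handed to Spider.crawl(): returns True for users
        # that were already crawled, so the spider can skip them.
        if user in self._seen:
            return True
        self._seen.add(user)
        return False

    def startCrawl(self):
        spider = Spider(self.userList)
        spider.crawl(self.hasProcessed)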
Example 3: Spider
# Required import: from spider import Spider [as alias]
# Or: from spider.Spider import crawl [as alias]
import sys

import MySQLdb

import config  # project-local settings module holding the db_* values
import parser  # project-local parsers module, not the stdlib one
from spider import Spider, Request  # Request assumed to live beside Spider

conn = MySQLdb.connect(host=config.db_host,
                       user=config.db_user,
                       passwd=config.db_password,
                       db=config.db_database,
                       charset='utf8')
cursor = conn.cursor()
# arg_config (defined elsewhere) maps the CLI argument to a configKey.
cursor.execute('select configValue from t_spider_config where configKey=%s',
               (arg_config.get(sys.argv[1]),))
config_values = [row[0] for row in cursor.fetchall()]

if sys.argv[1] == 'paper':
    spider_paper = Spider('paper')
    for search_exp in config_values:
        # Each search expression expands into page requests; cap at 500.
        reqs = parser.paper_page_parser(search_exp)[:500]
        for req in reqs:
            spider_paper.add_request(req)
    spider_paper.crawl()
elif sys.argv[1] == 'news':
    spider_news = Spider('news')
    for seed_url in config_values:
        spider_news.add_request(Request(arg=seed_url,
                                        parser=parser.news_parser))
    spider_news.crawl()
elif sys.argv[1] == 'patent':
    spider_patent = Spider('patent')
    for search_exp in config_values:
        spider_patent.add_request(Request(arg=search_exp,
                                          parser=parser.patent_parser))
    spider_patent.crawl()
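Example 3's Request pairs an argument (a seed URL or search expression) with the parser that should handle the fetched response, and the SELECT implies t_spider_config stores one configValue seed per configKey crawl type. The Request class itself is not shown on this page; a plausible minimal shape, purely as illustration:

# Hypothetical sketch of the Request object Example 3 constructs.
class Request:
    def __init__(self, arg, parser):
        self.arg = arg        # seed URL or search expression to fetch
        self.parser = parser  # callable applied to the fetched response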