本文整理汇总了Python中Tools.Tools.save_file方法的典型用法代码示例。如果您正苦于以下问题:Python Tools.save_file方法的具体用法?Python Tools.save_file怎么用?Python Tools.save_file使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Tools.Tools的用法示例。
在下文中一共展示了Tools.save_file方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: Sale_Spider
# 需要导入模块: from Tools import Tools [as 别名]
# 或者: from Tools.Tools import save_file [as 别名]
class Sale_Spider(BaseSpider):
    """Spider that reads pre-collected sale-page URLs from a log file,
    fetches each page, and saves one sub-option URL per car found there
    via Tools.save_file().
    """
    name = "sale"
    db = None
    tools = None          # Tools helper; created lazily in parse()
    debug = True
    level_id = ''
    attributes = []
    domain = "http://www.autohome.com.cn/"
    allowed_domains = ["autohome.com.cn"]
    start_urls = [
        "http://www.autohome.com.cn/b/",
    ]

    def parse(self, response):
        """Read URLs (one per line) from ./files/sale_urls.log and
        schedule a request for each, handled by parse_page()."""
        # Context manager closes the handle even if reading raises.
        # splitlines() strips line endings without mangling a final line
        # that lacks a trailing newline (the old url[0:-1] always chopped
        # the last character), and empty lines are skipped.
        with open("./files/sale_urls.log", "r") as handle:
            urls = [line for line in handle.read().splitlines() if line]
        self.tools = Tools()
        for url in urls:
            yield scrapy.Request(url, callback=self.parse_page)

    def parse_page(self, response):
        """Extract car ids from a sale page and persist a sub-option URL
        for each one."""
        # First run of digits in the URL is taken as the model id —
        # assumes the sale-page URL always starts with it; TODO confirm.
        re_url = re.findall(r"(\d+)", response.url)
        model_id = re_url[0]
        # re.escape is a no-op for digits but guards the interpolation
        # against regex metacharacters on principle.
        re_cars = re.findall(
            r"<a href=\"" + re.escape(model_id) + r"\/(\d+)\/options\.html\">.*?<\/a>",
            response.body)
        # Empty match list simply yields no iterations; no guard needed.
        for car_id in re_cars:
            option_url = self.tools.build_sub_option_url(model_id, car_id)
            self.tools.save_file("sub_option_urls.log", option_url)
示例2: Models_Spider
# 需要导入模块: from Tools import Tools [as 别名]
# 或者: from Tools.Tools import save_file [as 别名]
class Models_Spider(BaseSpider):
name = "models"
db = None
tools = None
debug = True
level_id = ''
attributes = []
domain = "http://www.autohome.com.cn/"
allowed_domains = ["autohome.com.cn"]
start_urls = [
"http://www.autohome.com.cn/a00/"
]
start_urls1 = [
"http://www.autohome.com.cn/a00/",
"http://www.autohome.com.cn/a0/",
"http://www.autohome.com.cn/a/",
"http://www.autohome.com.cn/b/",
"http://www.autohome.com.cn/c/",
"http://www.autohome.com.cn/d/",
"http://www.autohome.com.cn/suv/",
"http://www.autohome.com.cn/mpv/",
"http://www.autohome.com.cn/s/"
]
# parse document
def parse(self, response):
level = re.findall("\/(\w*?)\/$", response.url)
self.level_id = level[0]
self.db = MysqlDB()
self.db.connection()
self.tools = Tools()
option = re.findall(r"<dt><a href=\".*?brand-(\d+)\.html[\w|\W]*?<ul class=\"rank-list-ul\">([\w|\W]*?)<\/ul>", response.body)
for _html in option :
if len(_html) > 0 :
parent_id = _html[0]
option_urls = self.sub_parse(parent_id, _html[1])
for option_url in option_urls:
yield scrapy.Request(option_url, callback=self.parse_option_page)
exit()
self.db.commit()
self.db.close()
def sub_parse(self, parent_id, html):
option_urls = []
_items = re.findall(r"<li data-state=\"\d+\" >([\w|\W]*?)<\/li>", html)
for k in range(0, len(_items)):
car_id = re.findall(r"<a class=\"red\" href=\"\/(\d+)\/.*?\">", _items[k])
car_name = re.findall(r"\s+<h4.*?><a href=\".*?\">(.*?)<\/a>", _items[k])
car_price = re.findall(r"<a class=\"red\" href=\".*?\">([\d|\.]+)-([\d|\.]+)", _items[k])
if len(car_id) > 0 and len(car_name) > 0 and len(car_price[0]) == 2 :
option_url = self.tools.build_option_url(car_id[0])
self.tools.save_file("option_urls.log", option_url)
#insert_sql = self.build_sql(car_id[0], parent_id, car_name[0], car_price[0][0], car_price[0][1])
#self.db.query(insert_sql)
option_urls.append(option_url)
return option_urls
def parse_option_page(self, response):
sale_page = re.findall(r"<a href=\"(.*?)\" class=\"link-sale\">.*?<\/a>", response.body)
if self.debug:
print "************************************* Parse Option Page *********************************************"
if len(sale_page) == 1:
sale_url = self.domain + sale_page[0][1:]
self.tools.save_file("sale_urls.log", sale_url)
def build_sql(self, car_id, parent_id, car_name, min_price, max_price):
sql = "INSERT INTO car_models (`id`,`level_id`,`company_id`,`parent_id`,`cn_name`,`en_name`,`min_price`,`max_price`)VALUES"
sql += "(" + car_id + ",'" + self.level_id + "',0," + parent_id + ",'" + car_name.decode("GB2312") + "',''," + min_price + "," + max_price + ")"
if self.debug:
print "************************************* SQL *********************************************"
if self.debug:
print sql
return sql