This article collects typical usage examples of the Python method scrapy.mail.MailSender.from_settings. If you have been wondering what MailSender.from_settings does and how to use it in practice, the curated examples below should help. You can also read further about the class the method belongs to, scrapy.mail.MailSender.
The following 15 code examples of MailSender.from_settings are listed, sorted by popularity by default.
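All 15 examples follow the same pattern, so here is a minimal sketch of it up front: MailSender.from_settings builds a mailer from the project's MAIL_* settings, and send() delivers a message without blocking the crawl. The recipient address below is a placeholder:

from scrapy.mail import MailSender
from scrapy.utils.project import get_project_settings

settings = get_project_settings()
mailer = MailSender.from_settings(settings)  # reads MAIL_HOST, MAIL_PORT, MAIL_FROM, MAIL_USER, MAIL_PASS, MAIL_TLS, MAIL_SSL
mailer.send(
    to=["[email protected]"],          # placeholder recipient
    subject="Hello from Scrapy",
    body="Sent through the settings-configured SMTP account.",
)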
Example 1: closed
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def closed(self, reason):
    mail_body = ''  # renamed from `str`, which shadowed the builtin
    #conn = MySQLdb.connect(host='127.0.0.1', user='spider_user', passwd='[email protected]#', port=3306, db='db_spider', charset='utf8')
    #cur = conn.cursor()
    for index, item in enumerate(self.web_data_list):
        tmp = 'index:%d, userid:%s, author:%s,head_img:%s \n,age:%s,sex:%s, vote:%s,contentid:%s\n[%s]\n\n' % (
            index, item['userid'], item['author'], item['head_img'], item['age'],
            item['sex'], item['stats_vote'], item['contentid'], item['content'])
        mail_body = mail_body + tmp
        author = item['author']
        content = item['content']
        stats_vote = item['stats_vote']
        contentid = item['contentid']
        #sql = "insert ignore into t_qiushi(author,content,vote,content_id) values('%s','%s','%s','%s')" % (author, content, stats_vote, contentid)
        #cur.execute(sql)
    #print(mail_body)
    #conn.commit()
    #cur.close()
    #conn.close()
    # email the scraped data
    settings = get_project_settings()
    mailer = MailSender.from_settings(settings)
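The example stops right after the mailer is created; a minimal sketch of the send step that would typically follow (recipient and subject below are placeholders, not part of the original code):

mailer.send(
    to=["[email protected]"],            # placeholder recipient
    subject="qiushi spider results",    # placeholder subject
    body=mail_body,
)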
Example 2: send_mail
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def send_mail(self, month_year):
    subject = 'Bonn: Neuer Termin frei im ' + month_year
    body = self.start_urls[0]
    # you have to set up the mail settings in your own settings.py
    # http://doc.scrapy.org/en/latest/topics/email.html#topics-email-settings
    mailer = MailSender.from_settings(self.settings)
    mailer.send(to=[self.notification_email], subject=subject, body=body)
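As the comment and the linked docs note, MailSender.from_settings is configured entirely through settings.py; a minimal sketch of the relevant entries (all values are placeholders for a typical SMTP account):

# settings.py -- mail settings read by MailSender.from_settings
MAIL_FROM = "[email protected]"    # sender address
MAIL_HOST = "smtp.example.com"     # SMTP server
MAIL_PORT = 587
MAIL_USER = "[email protected]"    # omit to skip SMTP authentication
MAIL_PASS = "secret"
MAIL_TLS = True                    # upgrade the connection with STARTTLS
MAIL_SSL = False                   # or connect over SSL directly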
Example 3: from_crawler
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
@classmethod
def from_crawler(cls, crawler):
    recipients = crawler.settings.getlist("STATSMAILER_RCPTS")
    mail = MailSender.from_settings(crawler.settings)
    o = cls(crawler.stats, recipients, mail)
    crawler.signals.connect(o.engine_stopped, signal=signals.engine_stopped)
    crawler.signals.connect(o.engine_started, signal=signals.engine_started)
    return o
Example 4: from_crawler
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
@classmethod
def from_crawler(cls, crawler):
    recipients = crawler.settings.getlist("STATSMAILER_RCPTS")
    if not recipients:
        raise NotConfigured
    mail = MailSender.from_settings(crawler.settings)
    o = cls(crawler.stats, recipients, mail)
    crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
    return o
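Examples 3 and 4 wire a StatsMailer-style extension into crawler signals but omit the handler itself; a sketch of the spider_closed handler such an extension typically pairs with (modeled on Scrapy's own StatsMailer; the attribute names are assumed from the constructor call above):

def spider_closed(self, spider):
    # render every collected stat as one line of a plain-text report
    body = "\n".join("%-50s : %s" % kv for kv in self.stats.get_stats().items())
    return self.mail.send(
        to=self.recipients,
        subject="Scrapy stats for: %s" % spider.name,
        body=body,
    )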
Example 5: closed
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def closed(self, reason):
    self.logger.info("Spider closed: %s" % str(reason))
    mailer = MailSender.from_settings(self.settings)
    mailer.send(
        to=["[email protected]"],
        subject="Spider closed",
        body=str(self.crawler.stats.get_stats()),
        cc=["[email protected]"]
    )
Example 6: from_crawler
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
@classmethod
def from_crawler(cls, crawler):
    mail_list = crawler.settings.getlist("ERRMAIL_LIST")
    if not mail_list:
        raise NotConfigured
    mail = MailSender.from_settings(crawler.settings)
    o = cls(crawler.stats, mail_list, mail)
    crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
    return o
Example 7: parse
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def parse(self, response):
    # `settings` was undefined in the original; the spider's own settings are the likely intent
    mailer = MailSender.from_settings(self.settings)
    try:
        mailer.send(to=["[email protected]"], subject="scrapy spider", body="test message",
                    cc=['[email protected]'], charset="utf-8")
    except Exception as e:
        msg = "Error occurred...{0}".format(str(e))
        print(msg)
    print('mail sending')
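One caveat on the try/except above: MailSender.send is non-blocking and returns a Twisted Deferred (or None in MAIL_DEBUG mode), so SMTP failures surface asynchronously rather than as exceptions at the call site. A sketch of catching them with an errback instead:

d = mailer.send(to=["[email protected]"], subject="scrapy spider", body="test message")
if d is not None:
    d.addErrback(lambda failure: self.logger.error("Mail delivery failed: %s", failure.value))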
Example 8: from_crawler
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
@classmethod
def from_crawler(cls, crawler):
    mail = MailSender.from_settings(crawler.settings)
    instance = cls(mail)
    crawler.signals.connect(instance.spider_opened, signal=signals.spider_opened)
    crawler.signals.connect(instance.spider_closed, signal=signals.spider_closed)
    crawler.signals.connect(instance.item_scraped, signal=signals.item_scraped)
    return instance
Example 9: closed
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def closed(self, reason):
    self.logger.info("Spider closed: %s" % str(reason))
    mailer = MailSender.from_settings(self.settings)
    mailer.send(
        to=["******@qq.com"],
        subject="Spider closed",
        body=str(self.crawler.stats.get_stats()),
        cc=["**********@xxxxxxxx.com"]
    )
Example 10: close_spider
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def close_spider(self, spider):
    self.persist_dict.close()
    if not self.email_list:
        return
    email_str = "\n\n".join(self.email_list)
    mailer = MailSender.from_settings(spider.settings)
    with open('list.csv', 'r') as csv_file:
        mailer.send(
            to=["[email protected]"],
            subject="Scrapy Info",
            body=email_str,
            attachs=[('scrapy_info.csv', 'text/csv', csv_file)],
        )
Example 11: parse
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def parse(self, response):
    items = []
    mailer = MailSender.from_settings(self.settings)
    sel = scrapy.Selector(response)
    posts = sel.xpath('//div[@class="wall_item"]')
    for post in posts:
        item = HonlineItem()
        #AUTHOR = post.xpath('.//div[1]//div[1]//div[1]//a[1]/text()').extract()  # wi_head/wi_cont/wi_author/a
        item['post_link'] = str(post.xpath('.//div[1]//div[1]//div[2]//a[1]/@href').extract()[0])
        item['post_time'] = str(post.xpath('.//div[1]//div[1]//div[2]//a[1]/text()').extract()[0])
        item['key'] = post.re(r'\d{15}')  # e.g. " 289276165354594 "
        if len(item['key']) > 0:
            item['key'] = str(item['key'][0])
        items.append(item)
    return items
Example 12: __init__
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def __init__(self, crawler):
    if not crawler.settings.getbool('MEMUSAGE_ENABLED'):
        raise NotConfigured
    try:
        self.resource = __import__('resource')
    except ImportError:
        raise NotConfigured
    self.crawler = crawler
    self.warned = False
    self.notify_mails = crawler.settings.getlist('MEMUSAGE_NOTIFY_MAIL')
    self.limit = crawler.settings.getint('MEMUSAGE_LIMIT_MB') * 1024 * 1024
    self.warning = crawler.settings.getint('MEMUSAGE_WARNING_MB') * 1024 * 1024
    self.report = crawler.settings.getbool('MEMUSAGE_REPORT')
    self.mail = MailSender.from_settings(crawler.settings)
    crawler.signals.connect(self.engine_started, signal=signals.engine_started)
    crawler.signals.connect(self.engine_stopped, signal=signals.engine_stopped)
Example 13: __init__
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def __init__(self, crawler):
    if not crawler.settings.getbool('MEMUSAGE_ENABLED'):
        raise NotConfigured
    try:
        # stdlib's resource module is only available on unix platforms.
        self.resource = import_module('resource')
    except ImportError:
        raise NotConfigured
    self.crawler = crawler
    self.warned = False
    self.notify_mails = crawler.settings.getlist('MEMUSAGE_NOTIFY_MAIL')
    self.limit = crawler.settings.getint('MEMUSAGE_LIMIT_MB') * 1024 * 1024
    self.warning = crawler.settings.getint('MEMUSAGE_WARNING_MB') * 1024 * 1024
    self.check_interval = crawler.settings.getfloat('MEMUSAGE_CHECK_INTERVAL_SECONDS')
    self.mail = MailSender.from_settings(crawler.settings)
    crawler.signals.connect(self.engine_started, signal=signals.engine_started)
    crawler.signals.connect(self.engine_stopped, signal=signals.engine_stopped)
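Examples 12 and 13 are two revisions of the __init__ of Scrapy's MemoryUsage extension; the mailer they build is used from a periodic memory check roughly like the sketch below (simplified from the real extension; attribute names follow the examples, and the macOS special case for ru_maxrss units is glossed over):

def _check_limit(self):
    # ru_maxrss is peak RSS in KB on Linux (bytes on macOS)
    peak = self.resource.getrusage(self.resource.RUSAGE_SELF).ru_maxrss * 1024
    if self.limit and peak > self.limit:
        self.mail.send(
            to=self.notify_mails,
            subject="Memory limit exceeded (%dMB)" % (self.limit / 1024 / 1024),
            body="The crawler passed MEMUSAGE_LIMIT_MB and is being stopped.",
        )
        self.crawler.stop()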
Example 14: close_spider
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
def close_spider(self, spider):
    self.logfile.write("stock pipeline finish \n")
    pipelog = open("stockpipeline.txt")
    if spider.name == "nasdaq":
        # mail body
        mail_body = "please consider the following {count} stocks: \n".format(count=len(self.emailContent))
        for name, content in self.emailContent.items():
            mail_body += "{name} {currentprice} {yearlowprice} {yearhighprice} {sharevolume} \n".format(
                name=name, currentprice=content[0], yearlowprice=content[1],
                yearhighprice=content[2], sharevolume=content[3])
        nasdaqlog = open("nasdaqcrawl.txt")
        attachment = [('nasdaqlog.txt', 'text/plain', nasdaqlog), ('pipelog.txt', 'text/plain', pipelog)]
        mailer = MailSender.from_settings(emailSettings())
        mailer.send(to=["[email protected]"],
                    subject='nasdaq spider finish', body=mail_body, cc=["[email protected]"],
                    attachs=attachment)
        nasdaqlog.close()
        pipelog.close()
    self.logfile.close()
    self.session.close()
Example 15: from_crawler
# Required import: from scrapy.mail import MailSender [as alias]
# Or: from scrapy.mail.MailSender import from_settings [as alias]
@classmethod
def from_crawler(cls, crawler):
    recipients = crawler.settings.getlist('STATUSMAILER_RECIPIENTS')
    compression = crawler.settings.get('STATUSMAILER_COMPRESSION')
    if not compression:
        compressor = PlainCompressor
    elif compression.lower().startswith('gz'):
        compressor = GzipCompressor
    else:
        raise NotConfigured
    if not recipients:
        raise NotConfigured
    mail = MailSender.from_settings(crawler.settings)
    instance = cls(recipients, mail, compressor, crawler)
    crawler.signals.connect(instance.item_scraped, signal=signals.item_scraped)
    crawler.signals.connect(instance.spider_error, signal=signals.spider_error)
    crawler.signals.connect(instance.spider_closed, signal=signals.spider_closed)
    crawler.signals.connect(instance.request_received, signal=signals.request_received)
    return instance