当前位置: 首页>>代码示例>>Python>>正文


Python MailSender.from_settings方法代码示例

本文整理汇总了Python中scrapy.mail.MailSender.from_settings方法的典型用法代码示例。如果您正苦于以下问题:Python MailSender.from_settings方法的具体用法?Python MailSender.from_settings怎么用?Python MailSender.from_settings使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在scrapy.mail.MailSender的用法示例。


在下文中一共展示了MailSender.from_settings方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: closed

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
    def closed(self,reason):
        """Spider-closed callback: format every scraped item into a text
        report and prepare a mailer to send it.

        Args:
            reason: close reason supplied by Scrapy (e.g. 'finished').
        """
        # Build one formatted line per item, then join once. The original
        # concatenated into a variable named ``str`` (shadowing the builtin)
        # inside the loop, which is quadratic; it also had a tab/space
        # indentation mix that made the block a syntax error.
        lines = [
            'index:%d, userid:%s, author:%s,head_img:%s \n,age:%s,sex:%s, vote:%s,contentid:%s\n[%s]\n\n' % (
                index,
                item['userid'],
                item['author'],
                item['head_img'],
                item['age'],
                item['sex'],
                item['stats_vote'],
                item['contentid'],
                item['content'],
            )
            for index, item in enumerate(self.web_data_list)
        ]
        report = ''.join(lines)

        # Send the scraped data by e-mail (original comment: 将爬取的数据发送邮件).
        # NOTE(review): dead commented-out MySQL persistence code was removed;
        # restore from history if the DB path is ever revived.
        settings = get_project_settings()
        mailer = MailSender.from_settings(settings)
开发者ID:zhifeiji,项目名称:tutorial,代码行数:28,代码来源:qiushi_spider.py

示例2: send_mail

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
 def send_mail(self, month_year):
     """Notify the configured recipient that a new appointment slot opened.

     Mail transport settings come from the project settings; see
     http://doc.scrapy.org/en/latest/topics/email.html#topics-email-settings
     """
     sender = MailSender.from_settings(self.settings)
     sender.send(
         to=[self.notification_email],
         subject='Bonn: Neuer Termin frei im ' + month_year,
         body=self.start_urls[0],
     )
开发者ID:tobihagemann,项目名称:bonntermin,代码行数:9,代码来源:bonn.py

示例3: from_crawler

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
 def from_crawler(cls, crawler):
     """Build the stats-mailer extension and hook it to the engine signals."""
     settings = crawler.settings
     extension = cls(
         crawler.stats,
         settings.getlist("STATSMAILER_RCPTS"),
         MailSender.from_settings(settings),
     )
     for handler, sig in (
         (extension.engine_stopped, signals.engine_stopped),
         (extension.engine_started, signals.engine_started),
     ):
         crawler.signals.connect(handler, signal=sig)
     return extension
开发者ID:nyov,项目名称:scrapyext,代码行数:9,代码来源:statsmailer.py

示例4: from_crawler

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
 def from_crawler(cls, crawler):
     """Create the extension; disabled unless STATSMAILER_RCPTS is set."""
     rcpts = crawler.settings.getlist("STATSMAILER_RCPTS")
     if not rcpts:
         # No recipients configured -> extension stays off.
         raise NotConfigured
     sender = MailSender.from_settings(crawler.settings)
     ext = cls(crawler.stats, rcpts, sender)
     crawler.signals.connect(ext.spider_closed, signal=signals.spider_closed)
     return ext
开发者ID:InternetDataMiningLaboratory,项目名称:crawler_code,代码行数:10,代码来源:close_sender.py

示例5: closed

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
 def closed(self, reason):
     """On spider close, log the reason and e-mail the crawl statistics."""
     self.logger.info("Spider closed: %s" % str(reason))
     stats_body = str(self.crawler.stats.get_stats())
     MailSender.from_settings(self.settings).send(
         to=["[email protected]"],
         subject="Spider closed",
         body=stats_body,
         cc=["[email protected]"],
     )
开发者ID:seperinna,项目名称:scrapyProject,代码行数:11,代码来源:renthouse.py

示例6: from_crawler

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
    def from_crawler(cls, crawler):
        """Enable the error-mail extension only when ERRMAIL_LIST is configured."""
        recipients = crawler.settings.getlist("ERRMAIL_LIST")
        if not recipients:
            raise NotConfigured
        instance = cls(
            crawler.stats,
            recipients,
            MailSender.from_settings(crawler.settings),
        )
        crawler.signals.connect(instance.spider_closed, signal=signals.spider_closed)
        return instance
开发者ID:hackrole,项目名称:scrapy-utils,代码行数:11,代码来源:logerrmail.py

示例7: parse

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
    def parse(self, response):
        """Send a test e-mail when a response is parsed.

        Fix: the original referenced a bare global ``settings`` name that is
        not defined in this scope — the spider's own ``self.settings`` is the
        value Scrapy actually provides. Mixed tab/space indentation was also
        normalized to spaces.
        """
        mailer = MailSender.from_settings(self.settings)
        try:
            mailer.send(to=["[email protected]"],subject="scrapy spider",body="test message",cc=['[email protected]'],charset="utf-8")
        except Exception as e:
            # Report but do not re-raise: a mail failure should not kill the crawl.
            msg = "Error occurred...{0}".format(str(e))
            print(msg)

        print('mail sending')
开发者ID:Andy-wangke,项目名称:Front_end,代码行数:12,代码来源:redditbot_basic.py

示例8: from_crawler

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
    def from_crawler(cls, crawler):
        """Wire the extension's handlers to the spider/item lifecycle signals."""
        ext = cls(MailSender.from_settings(crawler.settings))
        handlers = (
            (ext.spider_opened, signals.spider_opened),
            (ext.spider_closed, signals.spider_closed),
            (ext.item_scraped, signals.item_scraped),
        )
        for callback, sig in handlers:
            crawler.signals.connect(callback, signal=sig)
        return ext
开发者ID:chyzas,项目名称:SiFScraper,代码行数:12,代码来源:extensions.py

示例9: closed

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
 def closed(self, reason):
     """Log the close reason and e-mail the crawl stats.

     Fix: removed a leftover ``import pdb; pdb.set_trace()`` debugger
     breakpoint that would hang any non-interactive run of the spider.
     """
     self.logger.info("Spider closed: %s" % str(reason))
     mailer = MailSender.from_settings(self.settings)
     mailer.send(
         to=["******@qq.com"],
         subject="Spider closed",
         body=str(self.crawler.stats.get_stats()),
         cc=["**********@xxxxxxxx.com"]
         )
开发者ID:joyceloo,项目名称:learn_Notes,代码行数:12,代码来源:bioonspider.py

示例10: close_spider

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
 def close_spider(self, spider):
     """Close the persistence store, then mail collected notes with the CSV
     attached. Sends nothing when no notes were collected."""
     self.persist_dict.close()
     if not self.email_list:
         return
     sender = MailSender.from_settings(spider.settings)
     with open('list.csv', 'r') as csv_file:
         sender.send(
             to=["[email protected]"],
             subject="Scrapy Info",
             body="\n\n".join(self.email_list),
             attachs=[('scrapy_info.csv', 'text/csv', csv_file)],
         )
开发者ID:milroc,项目名称:craigslist,代码行数:15,代码来源:pipelines.py

示例11: parse

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
    def parse(self, response):
        """Extract wall posts from the response.

        Returns:
            list: HonlineItem objects for every post carrying a 15-digit key.
        """
        # Fix: the original constructed a MailSender here and never used it;
        # the dead local was removed. The key pattern is now a raw string with
        # an explicit repeat count instead of 15 hand-typed ``\d`` escapes
        # (non-raw ``\d`` is deprecated in modern Python).
        items = []
        sel = scrapy.Selector(response)
        posts = sel.xpath('//div [@class="wall_item"]')
        for post in posts:
            item = HonlineItem()
            # wi_head/wi_cont/wi_author/a
            item['post_link'] = str(post.xpath('.//div[1]//div[1]//div[2]//a[1]/@href').extract()[0])
            item['post_time'] = str(post.xpath('.//div[1]//div[1]//div[2]//a[1]/text()').extract()[0])
            item['key'] = post.re(r'\d{15}')  # e.g. " 289276165354594 "
            if item['key']:
                item['key'] = str(item['key'][0])
                items.append(item)
        return items
开发者ID:ChbShoot,项目名称:HonlineCrawl,代码行数:18,代码来源:CheeseSpider.py

示例12: __init__

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
    def __init__(self, crawler):
        """Memory-usage extension: active only when MEMUSAGE_ENABLED is true
        and the (unix-only) ``resource`` module can be imported."""
        settings = crawler.settings
        if not settings.getbool('MEMUSAGE_ENABLED'):
            raise NotConfigured
        try:
            self.resource = __import__('resource')
        except ImportError:
            raise NotConfigured

        bytes_per_mb = 1024 * 1024
        self.crawler = crawler
        self.warned = False
        self.notify_mails = settings.getlist('MEMUSAGE_NOTIFY_MAIL')
        self.limit = settings.getint('MEMUSAGE_LIMIT_MB') * bytes_per_mb
        self.warning = settings.getint('MEMUSAGE_WARNING_MB') * bytes_per_mb
        self.report = settings.getbool('MEMUSAGE_REPORT')
        self.mail = MailSender.from_settings(settings)
        for callback, sig in (
            (self.engine_started, signals.engine_started),
            (self.engine_stopped, signals.engine_stopped),
        ):
            crawler.signals.connect(callback, signal=sig)
开发者ID:00gpowe,项目名称:scrapy,代码行数:19,代码来源:memusage.py

示例13: __init__

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
    def __init__(self, crawler):
        """Set up memory-usage monitoring from crawler settings.

        Raises NotConfigured when the feature is disabled or when the
        unix-only stdlib ``resource`` module is unavailable.
        """
        settings = crawler.settings
        if not settings.getbool('MEMUSAGE_ENABLED'):
            raise NotConfigured
        try:
            # stdlib's resource module is only available on unix platforms.
            self.resource = import_module('resource')
        except ImportError:
            raise NotConfigured

        mb = 1024 * 1024
        self.crawler = crawler
        self.warned = False
        self.notify_mails = settings.getlist('MEMUSAGE_NOTIFY_MAIL')
        self.limit = settings.getint('MEMUSAGE_LIMIT_MB') * mb
        self.warning = settings.getint('MEMUSAGE_WARNING_MB') * mb
        self.check_interval = settings.getfloat('MEMUSAGE_CHECK_INTERVAL_SECONDS')
        self.mail = MailSender.from_settings(settings)
        crawler.signals.connect(self.engine_started, signal=signals.engine_started)
        crawler.signals.connect(self.engine_stopped, signal=signals.engine_stopped)
开发者ID:ArturGaspar,项目名称:scrapy,代码行数:20,代码来源:memusage.py

示例14: close_spider

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
	def close_spider(self, spider):
		"""Flush the pipeline log, e-mail a stock summary for the nasdaq
		spider, and close resources.

		Fix: the two report files are now opened with ``with`` so their
		handles are closed even if building or sending the mail raises —
		the original leaked both handles on any error path.
		"""
		self.logfile.write("stock pipeline finish \n")
		with open("stockpipeline.txt") as pipelog:
			if spider.name == "nasdaq":
				# mail body
				mail_body = "please consider the following {count} stocks: \n".format(count=len(self.emailContent))
				for name, content in self.emailContent.items():
					mail_body += "{name}	{currentprice}	{yearlowprice}	{yearhighprice}	{sharevolume} \n".format(
						name=name, currentprice=content[0], yearlowprice=content[1], yearhighprice=content[2], sharevolume=content[3])

				with open("nasdaqcrawl.txt") as nasdaqlog:
					attachment = [('nasdaqlog.txt', 'text/plain', nasdaqlog), ('pipelog.txt', 'text/plain', pipelog)]
					mailer = MailSender.from_settings(emailSettings())
					mailer.send(to=["[email protected]"],
								subject='nasdaq spider finish', body=mail_body, cc=["[email protected]"],
								attachs=attachment)
		self.logfile.close()
		self.session.close()
开发者ID:leozhao0709,项目名称:fsc,代码行数:22,代码来源:pipelines.py

示例15: from_crawler

# 需要导入模块: from scrapy.mail import MailSender [as 别名]
# 或者: from scrapy.mail.MailSender import from_settings [as 别名]
    def from_crawler(cls, crawler):
        """Build the status-mailer extension from crawler settings.

        STATUSMAILER_COMPRESSION may be unset (plain) or start with 'gz'
        (gzip); any other value — or an empty recipient list — disables
        the extension via NotConfigured.
        """
        settings = crawler.settings
        recipients = settings.getlist('STATUSMAILER_RECIPIENTS')
        compression = settings.get('STATUSMAILER_COMPRESSION')

        if not compression:
            compressor = PlainCompressor
        elif compression.lower().startswith('gz'):
            compressor = GzipCompressor
        else:
            raise NotConfigured

        if not recipients:
            raise NotConfigured

        ext = cls(recipients, MailSender.from_settings(settings), compressor, crawler)

        for callback, sig in (
            (ext.item_scraped, signals.item_scraped),
            (ext.spider_error, signals.spider_error),
            (ext.spider_closed, signals.spider_closed),
            (ext.request_received, signals.request_received),
        ):
            crawler.signals.connect(callback, signal=sig)

        return ext
开发者ID:JayveeHe,项目名称:spider_senz,代码行数:25,代码来源:statusmailer.py


注:本文中的scrapy.mail.MailSender.from_settings方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。